Diffstat (limited to 'meta/lib')
198 files changed, 16717 insertions, 4011 deletions
diff --git a/meta/lib/bbconfigbuild/configfragments.py b/meta/lib/bbconfigbuild/configfragments.py
new file mode 100644
index 0000000000..61c33ac316
--- /dev/null
+++ b/meta/lib/bbconfigbuild/configfragments.py
@@ -0,0 +1,185 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import logging | ||
8 | import os | ||
9 | import sys | ||
10 | import os.path | ||
11 | |||
12 | import bb.utils | ||
13 | |||
14 | from bblayers.common import LayerPlugin | ||
15 | |||
16 | logger = logging.getLogger('bitbake-config-layers') | ||
17 | |||
18 | sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) | ||
19 | |||
20 | def plugin_init(plugins): | ||
21 | return ConfigFragmentsPlugin() | ||
22 | |||
23 | class ConfigFragmentsPlugin(LayerPlugin): | ||
24 | def get_fragment_info(self, path, name): | ||
25 | d = bb.data.init() | ||
26 | d.setVar('BBPATH', self.tinfoil.config_data.getVar('BBPATH')) | ||
27 | bb.parse.handle(path, d, True) | ||
28 | summary = d.getVar('BB_CONF_FRAGMENT_SUMMARY') | ||
29 | description = d.getVar('BB_CONF_FRAGMENT_DESCRIPTION') | ||
30 | if not summary: | ||
31 | raise Exception('Please add a one-line summary as a BB_CONF_FRAGMENT_SUMMARY = \"...\" variable at the beginning of {}'.format(path)) | ||
32 | |||
33 | if not description: | ||
34 | raise Exception('Please add a description as a BB_CONF_FRAGMENT_DESCRIPTION = \"...\" variable at the beginning of {}'.format(path)) | ||
35 | |||
36 | return summary, description | ||
37 | |||
38 | def discover_fragments(self): | ||
39 | fragments_path_prefix = self.tinfoil.config_data.getVar('OE_FRAGMENTS_PREFIX') | ||
40 | allfragments = {} | ||
41 | for layername in self.bbfile_collections: | ||
42 | layerdir = self.bbfile_collections[layername] | ||
43 | fragments = [] | ||
44 | for topdir, dirs, files in os.walk(os.path.join(layerdir, fragments_path_prefix)): | ||
45 | fragmentdir = os.path.relpath(topdir, os.path.join(layerdir, fragments_path_prefix)) | ||
46 | for fragmentfile in sorted(files): | ||
47 | if fragmentfile.startswith(".") or not fragmentfile.endswith(".conf"): | ||
48 | continue | ||
49 | fragmentname = os.path.normpath("/".join((layername, fragmentdir, fragmentfile.split('.')[0]))) | ||
50 | fragmentpath = os.path.join(topdir, fragmentfile) | ||
51 | fragmentsummary, fragmentdesc = self.get_fragment_info(fragmentpath, fragmentname) | ||
52 | fragments.append({'path':fragmentpath, 'name':fragmentname, 'summary':fragmentsummary, 'description':fragmentdesc}) | ||
53 | if fragments: | ||
54 | allfragments[layername] = {'layerdir':layerdir,'fragments':fragments} | ||
55 | return allfragments | ||
56 | |||
57 | def do_list_fragments(self, args): | ||
58 | """ List available configuration fragments """ | ||
59 | def print_fragment(f, verbose, is_enabled): | ||
60 | if not verbose: | ||
61 | print('{}\t{}'.format(f['name'], f['summary'])) | ||
62 | else: | ||
63 | print('Name: {}\nPath: {}\nEnabled: {}\nSummary: {}\nDescription:\n{}\n'.format(f['name'], f['path'], 'yes' if is_enabled else 'no', f['summary'],''.join(f['description']))) | ||
64 | |||
65 | def print_builtin_fragments(builtin, enabled): | ||
66 | print('Available built-in fragments:') | ||
67 | builtin_dict = {i[0]:i[1] for i in [f.split(':') for f in builtin]} | ||
68 | for prefix,var in builtin_dict.items(): | ||
69 | print('{}/...\tSets {} = ...'.format(prefix, var)) | ||
70 | print('') | ||
71 | enabled_builtin_fragments = [f for f in enabled if self.builtin_fragment_exists(f)] | ||
72 | print('Enabled built-in fragments:') | ||
73 | for f in enabled_builtin_fragments: | ||
74 | prefix, value = f.split('/', 1) | ||
75 | print('{}\tSets {} = "{}"'.format(f, builtin_dict[prefix], value)) | ||
76 | print('') | ||
77 | |||
78 | all_enabled_fragments = (self.tinfoil.config_data.getVar('OE_FRAGMENTS') or "").split() | ||
79 | all_builtin_fragments = (self.tinfoil.config_data.getVar('OE_FRAGMENTS_BUILTIN') or "").split() | ||
80 | print_builtin_fragments(all_builtin_fragments, all_enabled_fragments) | ||
81 | |||
82 | for layername, layerdata in self.discover_fragments().items(): | ||
83 | layerdir = layerdata['layerdir'] | ||
84 | fragments = layerdata['fragments'] | ||
85 | enabled_fragments = [f for f in fragments if f['name'] in all_enabled_fragments] | ||
86 | disabled_fragments = [f for f in fragments if f['name'] not in all_enabled_fragments] | ||
87 | |||
88 | print('Available fragments in {} layer located in {}:\n'.format(layername, layerdir)) | ||
89 | if enabled_fragments: | ||
90 | print('Enabled fragments:') | ||
91 | for f in enabled_fragments: | ||
92 | print_fragment(f, args.verbose, is_enabled=True) | ||
93 | print('') | ||
94 | if disabled_fragments: | ||
95 | print('Unused fragments:') | ||
96 | for f in disabled_fragments: | ||
97 | print_fragment(f, args.verbose, is_enabled=False) | ||
98 | print('') | ||
99 | |||
100 | def fragment_exists(self, fragmentname): | ||
101 | for layername, layerdata in self.discover_fragments().items(): | ||
102 | for f in layerdata['fragments']: | ||
103 | if f['name'] == fragmentname: | ||
104 | return True | ||
105 | return False | ||
106 | |||
107 | def builtin_fragment_exists(self, fragmentname): | ||
108 | fragment_prefix = fragmentname.split("/",1)[0] | ||
109 | fragment_prefix_defs = set([f.split(':')[0] for f in self.tinfoil.config_data.getVar('OE_FRAGMENTS_BUILTIN').split()]) | ||
110 | return fragment_prefix in fragment_prefix_defs | ||
111 | |||
112 | def create_conf(self, confpath): | ||
113 | if not os.path.exists(confpath): | ||
114 | with open(confpath, 'w') as f: | ||
115 | f.write('') | ||
116 | with open(confpath, 'r') as f: | ||
117 | lines = f.read() | ||
118 | if "OE_FRAGMENTS += " not in lines: | ||
119 | lines += "\nOE_FRAGMENTS += \"\"\n" | ||
120 | with open(confpath, 'w') as f: | ||
121 | f.write(lines) | ||
122 | |||
123 | def do_enable_fragment(self, args): | ||
124 | """ Enable a fragment in the local build configuration """ | ||
125 | def enable_helper(varname, origvalue, op, newlines): | ||
126 | enabled_fragments = origvalue.split() | ||
127 | for f in args.fragmentname: | ||
128 | if f in enabled_fragments: | ||
129 | print("Fragment {} already included in {}".format(f, args.confpath)) | ||
130 | else: | ||
131 | enabled_fragments.append(f) | ||
132 | return " ".join(enabled_fragments), None, 0, True | ||
133 | |||
134 | for f in args.fragmentname: | ||
135 | if not self.fragment_exists(f) and not self.builtin_fragment_exists(f): | ||
136 | raise Exception("Fragment {} does not exist; use 'list-fragments' to see the full list.".format(f)) | ||
137 | |||
138 | self.create_conf(args.confpath) | ||
139 | modified = bb.utils.edit_metadata_file(args.confpath, ["OE_FRAGMENTS"], enable_helper) | ||
140 | if modified: | ||
141 | print("Fragment {} added to {}.".format(", ".join(args.fragmentname), args.confpath)) | ||
142 | |||
143 | def do_disable_fragment(self, args): | ||
144 | """ Disable a fragment in the local build configuration """ | ||
145 | def disable_helper(varname, origvalue, op, newlines): | ||
146 | enabled_fragments = origvalue.split() | ||
147 | for f in args.fragmentname: | ||
148 | if f in enabled_fragments: | ||
149 | enabled_fragments.remove(f) | ||
150 | else: | ||
151 | print("Fragment {} not currently enabled in {}".format(f, args.confpath)) | ||
152 | return " ".join(enabled_fragments), None, 0, True | ||
153 | |||
154 | self.create_conf(args.confpath) | ||
155 | modified = bb.utils.edit_metadata_file(args.confpath, ["OE_FRAGMENTS"], disable_helper) | ||
156 | if modified: | ||
157 | print("Fragment {} removed from {}.".format(", ".join(args.fragmentname), args.confpath)) | ||
158 | |||
159 | def do_disable_all_fragments(self, args): | ||
160 | """ Disable all fragments in the local build configuration """ | ||
161 | def disable_all_helper(varname, origvalue, op, newlines): | ||
162 | return "", None, 0, True | ||
163 | |||
164 | self.create_conf(args.confpath) | ||
165 | modified = bb.utils.edit_metadata_file(args.confpath, ["OE_FRAGMENTS"], disable_all_helper) | ||
166 | if modified: | ||
167 | print("All fragments removed from {}.".format(args.confpath)) | ||
168 | |||
169 | def register_commands(self, sp): | ||
170 | default_confpath = os.path.join(os.environ["BBPATH"], "conf/auto.conf") | ||
171 | |||
172 | parser_list_fragments = self.add_command(sp, 'list-fragments', self.do_list_fragments, parserecipes=False) | ||
173 | parser_list_fragments.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath)) | ||
174 | parser_list_fragments.add_argument('--verbose', '-v', action='store_true', help='Print extended descriptions of the fragments') | ||
175 | |||
176 | parser_enable_fragment = self.add_command(sp, 'enable-fragment', self.do_enable_fragment, parserecipes=False) | ||
177 | parser_enable_fragment.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath)) | ||
178 | parser_enable_fragment.add_argument('fragmentname', help='The name of the fragment (use list-fragments to see them)', nargs='+') | ||
179 | |||
180 | parser_disable_fragment = self.add_command(sp, 'disable-fragment', self.do_disable_fragment, parserecipes=False) | ||
181 | parser_disable_fragment.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath)) | ||
182 | parser_disable_fragment.add_argument('fragmentname', help='The name of the fragment', nargs='+') | ||
183 | |||
184 | parser_disable_all = self.add_command(sp, 'disable-all-fragments', self.do_disable_all_fragments, parserecipes=False) | ||
185 | parser_disable_all.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath)) | ||
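A minimal sketch of how discover_fragments() above derives a fragment's name from its location; the layer and file names here are hypothetical:

    import os

    layername = "meta-example"
    fragmentdir = "machine"            # directory relative to OE_FRAGMENTS_PREFIX
    fragmentfile = "debug-tweaks.conf"
    # Same expression as in discover_fragments():
    name = os.path.normpath("/".join((layername, fragmentdir, fragmentfile.split('.')[0])))
    print(name)  # meta-example/machine/debug-tweaks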
diff --git a/meta/lib/bblayers/machines.py b/meta/lib/bblayers/machines.py
new file mode 100644
index 0000000000..5fd970af0e
--- /dev/null
+++ b/meta/lib/bblayers/machines.py
@@ -0,0 +1,37 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import logging | ||
8 | import pathlib | ||
9 | |||
10 | from bblayers.common import LayerPlugin | ||
11 | |||
12 | logger = logging.getLogger('bitbake-layers') | ||
13 | |||
14 | def plugin_init(plugins): | ||
15 | return ShowMachinesPlugin() | ||
16 | |||
17 | class ShowMachinesPlugin(LayerPlugin): | ||
18 | def do_show_machines(self, args): | ||
19 | """List the machines available in the currently configured layers.""" | ||
20 | |||
21 | for layer_dir in self.bblayers: | ||
22 | layer_name = self.get_layer_name(layer_dir) | ||
23 | |||
24 | if args.layer and args.layer != layer_name: | ||
25 | continue | ||
26 | |||
27 | for p in sorted(pathlib.Path(layer_dir).glob("conf/machine/*.conf")): | ||
28 | if args.bare: | ||
29 | logger.plain("%s" % (p.stem)) | ||
30 | else: | ||
31 | logger.plain("%s (%s)" % (p.stem, layer_name)) | ||
32 | |||
33 | |||
34 | def register_commands(self, sp): | ||
35 | parser_show_machines = self.add_command(sp, "show-machines", self.do_show_machines) | ||
36 | parser_show_machines.add_argument('-b', '--bare', help='output just the machine names, not the source layer', action='store_true') | ||
37 | parser_show_machines.add_argument('-l', '--layer', help='Limit to machines in the specified layer') | ||
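The listing is just a glob over each configured layer; a standalone sketch of the core loop, with a hypothetical layer path:

    import pathlib

    layer_dir = "/srv/poky/meta-yocto-bsp"  # hypothetical checkout
    for p in sorted(pathlib.Path(layer_dir).glob("conf/machine/*.conf")):
        print(p.stem)                       # the machine name, without the .conf suffix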
diff --git a/meta/lib/bblayers/makesetup.py b/meta/lib/bblayers/makesetup.py
index 99d5973760..4199b5f069 100644
--- a/meta/lib/bblayers/makesetup.py
+++ b/meta/lib/bblayers/makesetup.py
@@ -48,8 +48,9 @@ class MakeSetupPlugin(LayerPlugin): | |||
48 | if l_name == 'workspace': | 48 | if l_name == 'workspace': |
49 | continue | 49 | continue |
50 | if l_ismodified: | 50 | if l_ismodified: |
51 | logger.error("Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path)) | 51 | e = "Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path) |
52 | return | 52 | logger.error(e) |
53 | raise Exception(e) | ||
53 | repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path) | 54 | repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path) |
54 | 55 | ||
55 | if self._is_submodule(repo_path): | 56 | if self._is_submodule(repo_path): |
@@ -62,9 +63,6 @@ class MakeSetupPlugin(LayerPlugin): | |||
62 | 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}} | 63 | 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}} |
63 | if repo_path == destdir_repo: | 64 | if repo_path == destdir_repo: |
64 | repos[repo_path]['contains_this_file'] = True | 65 | repos[repo_path]['contains_this_file'] = True |
65 | if not repos[repo_path]['git-remote']['remotes'] and not repos[repo_path]['contains_this_file']: | ||
66 | logger.error("Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=repo_path)) | ||
67 | return | ||
68 | 66 | ||
69 | top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()]) | 67 | top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()]) |
70 | 68 | ||
@@ -74,6 +72,7 @@ class MakeSetupPlugin(LayerPlugin): | |||
74 | repos_nopaths[r_nopath] = repos[r] | 72 | repos_nopaths[r_nopath] = repos[r] |
75 | r_relpath = os.path.relpath(r, top_path) | 73 | r_relpath = os.path.relpath(r, top_path) |
76 | repos_nopaths[r_nopath]['path'] = r_relpath | 74 | repos_nopaths[r_nopath]['path'] = r_relpath |
75 | repos_nopaths[r_nopath]['originpath'] = r | ||
77 | return repos_nopaths | 76 | return repos_nopaths |
78 | 77 | ||
79 | def do_make_setup(self, args): | 78 | def do_make_setup(self, args): |
diff --git a/meta/lib/bblayers/setupwriters/oe-setup-layers.py b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
index 59ca968ff3..8faeabfabc 100644
--- a/meta/lib/bblayers/setupwriters/oe-setup-layers.py
+++ b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
@@ -85,6 +85,11 @@ class OeSetupLayersWriter(): | |||
85 | if not os.path.exists(args.destdir): | 85 | if not os.path.exists(args.destdir): |
86 | os.makedirs(args.destdir) | 86 | os.makedirs(args.destdir) |
87 | repos = parent.make_repo_config(args.destdir) | 87 | repos = parent.make_repo_config(args.destdir) |
88 | for r in repos.values(): | ||
89 | if not r['git-remote']['remotes'] and not r.get('contains_this_file', False): | ||
90 | e = "Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=r['originpath']) | ||
91 | raise Exception(e) | ||
92 | del r['originpath'] | ||
88 | json = {"version":"1.0","sources":repos} | 93 | json = {"version":"1.0","sources":repos} |
89 | if not repos: | 94 | if not repos: |
90 | err = "Could not determine layer sources" | 95 | err = "Could not determine layer sources" |
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 6eb536ad28..73de774266 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -7,6 +7,9 @@ | |||
7 | from pkgutil import extend_path | 7 | from pkgutil import extend_path |
8 | __path__ = extend_path(__path__, __name__) | 8 | __path__ = extend_path(__path__, __name__) |
9 | 9 | ||
10 | BBIMPORTS = ["data", "path", "utils", "types", "package", "packagedata", \ | 10 | # Modules with vistorcode need to go first else anything depending on them won't be |
11 | "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \ | 11 | # processed correctly (e.g. qa) |
12 | "qa", "reproducible", "rust", "buildcfg", "go"] | 12 | BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \ |
13 | "packagegroup", "sstatesig", "lsb", "cachedpath", "license", "qemu", \ | ||
14 | "reproducible", "rust", "buildcfg", "go", "spdx30_tasks", "spdx_common", \ | ||
15 | "cve_check", "tune"] | ||
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
new file mode 100644
index 0000000000..7ee148c4e2
--- /dev/null
+++ b/meta/lib/oe/bootfiles.py
@@ -0,0 +1,57 @@ | |||
1 | # | ||
2 | # SPDX-License-Identifier: MIT | ||
3 | # | ||
4 | # Copyright (C) 2024 Marcus Folkesson | ||
5 | # Author: Marcus Folkesson <marcus.folkesson@gmail.com> | ||
6 | # | ||
7 | # Utility functions handling boot files | ||
8 | # | ||
9 | # Look into deploy_dir and search for boot_files. | ||
10 | # Returns a list of tuples with (original filepath relative to | ||
11 | # deploy_dir, desired destination filepath) | ||
12 | # | ||
13 | # Heavily inspired by bootimg_partition.py | ||
14 | # | ||
15 | def get_boot_files(deploy_dir, boot_files): | ||
16 | import re | ||
17 | import os | ||
18 | from glob import glob | ||
19 | |||
20 | if boot_files is None: | ||
21 | return None | ||
22 | |||
23 | # list of tuples (src_name, dst_name) | ||
24 | deploy_files = [] | ||
25 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
26 | if ';' in src_entry: | ||
27 | dst_entry = tuple(src_entry.split(';')) | ||
28 | if not dst_entry[0] or not dst_entry[1]: | ||
29 | raise ValueError('Malformed boot file entry: %s' % src_entry) | ||
30 | else: | ||
31 | dst_entry = (src_entry, src_entry) | ||
32 | |||
33 | deploy_files.append(dst_entry) | ||
34 | |||
35 | install_files = [] | ||
36 | for deploy_entry in deploy_files: | ||
37 | src, dst = deploy_entry | ||
38 | if '*' in src: | ||
39 | # by default install files under their basename | ||
40 | entry_name_fn = os.path.basename | ||
41 | if dst != src: | ||
42 | # unless a target name was given, then treat name | ||
43 | # as a directory and append a basename | ||
44 | entry_name_fn = lambda name: \ | ||
45 | os.path.join(dst, | ||
46 | os.path.basename(name)) | ||
47 | |||
48 | srcs = glob(os.path.join(deploy_dir, src)) | ||
49 | |||
50 | for entry in srcs: | ||
51 | src = os.path.relpath(entry, deploy_dir) | ||
52 | entry_dst_name = entry_name_fn(entry) | ||
53 | install_files.append((src, entry_dst_name)) | ||
54 | else: | ||
55 | install_files.append((src, dst)) | ||
56 | |||
57 | return install_files | ||
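A short usage sketch of get_boot_files(); the deploy directory and its contents are hypothetical. "src" entries are taken relative to deploy_dir, "src;dst" renames, and for glob entries the destination is treated as a directory:

    from oe.bootfiles import get_boot_files

    files = get_boot_files("/path/to/deploy", "u-boot.bin;boot.bin devicetree/*.dtb;dtbs")
    print(files)
    # [('u-boot.bin', 'boot.bin'),
    #  ('devicetree/am335x-bone.dtb', 'dtbs/am335x-bone.dtb'), ...]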
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
index 27b059b834..85b903fab0 100644
--- a/meta/lib/oe/buildcfg.py
+++ b/meta/lib/oe/buildcfg.py
@@ -17,21 +17,21 @@ def get_scmbasepath(d): | |||
17 | def get_metadata_git_branch(path): | 17 | def get_metadata_git_branch(path): |
18 | try: | 18 | try: |
19 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) | 19 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) |
20 | except bb.process.ExecutionError: | 20 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
21 | rev = '<unknown>' | 21 | rev = '<unknown>' |
22 | return rev.strip() | 22 | return rev.strip() |
23 | 23 | ||
24 | def get_metadata_git_revision(path): | 24 | def get_metadata_git_revision(path): |
25 | try: | 25 | try: |
26 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) | 26 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) |
27 | except bb.process.ExecutionError: | 27 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
28 | rev = '<unknown>' | 28 | rev = '<unknown>' |
29 | return rev.strip() | 29 | return rev.strip() |
30 | 30 | ||
31 | def get_metadata_git_toplevel(path): | 31 | def get_metadata_git_toplevel(path): |
32 | try: | 32 | try: |
33 | toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path) | 33 | toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path) |
34 | except bb.process.ExecutionError: | 34 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
35 | return "" | 35 | return "" |
36 | return toplevel.strip() | 36 | return toplevel.strip() |
37 | 37 | ||
@@ -39,21 +39,21 @@ def get_metadata_git_remotes(path): | |||
39 | try: | 39 | try: |
40 | remotes_list, _ = bb.process.run('git remote', cwd=path) | 40 | remotes_list, _ = bb.process.run('git remote', cwd=path) |
41 | remotes = remotes_list.split() | 41 | remotes = remotes_list.split() |
42 | except bb.process.ExecutionError: | 42 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
43 | remotes = [] | 43 | remotes = [] |
44 | return remotes | 44 | return remotes |
45 | 45 | ||
46 | def get_metadata_git_remote_url(path, remote): | 46 | def get_metadata_git_remote_url(path, remote): |
47 | try: | 47 | try: |
48 | uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path) | 48 | uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path) |
49 | except bb.process.ExecutionError: | 49 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
50 | return "" | 50 | return "" |
51 | return uri.strip() | 51 | return uri.strip() |
52 | 52 | ||
53 | def get_metadata_git_describe(path): | 53 | def get_metadata_git_describe(path): |
54 | try: | 54 | try: |
55 | describe, _ = bb.process.run('git describe --tags', cwd=path) | 55 | describe, _ = bb.process.run('git describe --tags --dirty', cwd=path) |
56 | except bb.process.ExecutionError: | 56 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
57 | return "" | 57 | return "" |
58 | return describe.strip() | 58 | return describe.strip() |
59 | 59 | ||
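With --dirty added above, the description gains a suffix whenever the checkout has uncommitted changes, and catching NotFoundError lets hosts without git degrade to the empty-string fallback. A usage sketch with a hypothetical path and output:

    from oe.buildcfg import get_metadata_git_describe

    print(get_metadata_git_describe("/srv/poky"))  # e.g. 'yocto-5.1-57-gabc1234-dirty'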
diff --git a/meta/lib/buildstats.py b/meta/lib/oe/buildstats.py
index 1ffe679801..2700245ec6 100644
--- a/meta/lib/buildstats.py
+++ b/meta/lib/oe/buildstats.py
@@ -10,6 +10,7 @@ | |||
10 | import time | 10 | import time |
11 | import re | 11 | import re |
12 | import bb.event | 12 | import bb.event |
13 | from collections import deque | ||
13 | 14 | ||
14 | class SystemStats: | 15 | class SystemStats: |
15 | def __init__(self, d): | 16 | def __init__(self, d): |
@@ -18,7 +19,8 @@ class SystemStats: | |||
18 | bb.utils.mkdirhier(bsdir) | 19 | bb.utils.mkdirhier(bsdir) |
19 | file_handlers = [('diskstats', self._reduce_diskstats), | 20 | file_handlers = [('diskstats', self._reduce_diskstats), |
20 | ('meminfo', self._reduce_meminfo), | 21 | ('meminfo', self._reduce_meminfo), |
21 | ('stat', self._reduce_stat)] | 22 | ('stat', self._reduce_stat), |
23 | ('net/dev', self._reduce_net)] | ||
22 | 24 | ||
23 | # Some hosts like openSUSE have readable /proc/pressure files | 25 | # Some hosts like openSUSE have readable /proc/pressure files |
24 | # but throw errors when these files are opened. Catch these error | 26 | # but throw errors when these files are opened. Catch these error |
@@ -47,7 +49,10 @@ class SystemStats: | |||
47 | # not strictly necessary, but using it makes the class | 49 | # not strictly necessary, but using it makes the class |
48 | # more robust should two processes ever write | 50 | # more robust should two processes ever write |
49 | # concurrently. | 51 | # concurrently. |
50 | destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename)) | 52 | if filename == 'net/dev': |
53 | destfile = os.path.join(bsdir, 'reduced_proc_net.log') | ||
54 | else: | ||
55 | destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename)) | ||
51 | self.proc_files.append((filename, open(destfile, 'ab'), handler)) | 56 | self.proc_files.append((filename, open(destfile, 'ab'), handler)) |
52 | self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab') | 57 | self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab') |
53 | # Last time that we sampled /proc data resp. recorded disk monitoring data. | 58 | # Last time that we sampled /proc data resp. recorded disk monitoring data. |
@@ -66,12 +71,13 @@ class SystemStats: | |||
66 | self.min_seconds = 1.0 - self.tolerance | 71 | self.min_seconds = 1.0 - self.tolerance |
67 | 72 | ||
68 | self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') | 73 | self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') |
69 | self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') | 74 | self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+|nvme\d+n\d+.*)$') |
70 | self.diskstats_ltime = None | 75 | self.diskstats_ltime = None |
71 | self.diskstats_data = None | 76 | self.diskstats_data = None |
72 | self.stat_ltimes = None | 77 | self.stat_ltimes = None |
73 | # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename | 78 | # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename |
74 | self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None} | 79 | self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None} |
80 | self.net_stats = {} | ||
75 | 81 | ||
76 | def close(self): | 82 | def close(self): |
77 | self.monitor_disk.close() | 83 | self.monitor_disk.close() |
@@ -93,8 +99,41 @@ class SystemStats: | |||
93 | b' '.join([values[x] for x in | 99 | b' '.join([values[x] for x in |
94 | (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n') | 100 | (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n') |
95 | 101 | ||
102 | def _reduce_net(self, time, data, filename): | ||
103 | data = data.split(b'\n') | ||
104 | for line in data[2:]: | ||
105 | if b":" not in line: | ||
106 | continue | ||
107 | try: | ||
108 | parts = line.split() | ||
109 | iface = (parts[0].strip(b':')).decode('ascii') | ||
110 | receive_bytes = int(parts[1]) | ||
111 | transmit_bytes = int(parts[9]) | ||
112 | except Exception: | ||
113 | continue | ||
114 | |||
115 | if iface not in self.net_stats: | ||
116 | self.net_stats[iface] = deque(maxlen=2) | ||
117 | self.net_stats[iface].append((receive_bytes, transmit_bytes, 0, 0)) | ||
118 | prev = self.net_stats[iface][-1] if self.net_stats[iface] else (0, 0, 0, 0) | ||
119 | receive_diff = receive_bytes - prev[0] | ||
120 | transmit_diff = transmit_bytes - prev[1] | ||
121 | self.net_stats[iface].append(( | ||
122 | receive_bytes, | ||
123 | transmit_bytes, | ||
124 | receive_diff, | ||
125 | transmit_diff | ||
126 | )) | ||
127 | |||
128 | result_str = "\n".join( | ||
129 | f"{iface}: {net_data[-1][0]} {net_data[-1][1]} {net_data[-1][2]} {net_data[-1][3]}" | ||
130 | for iface, net_data in self.net_stats.items() | ||
131 | ) + "\n" | ||
132 | |||
133 | return time, result_str.encode('ascii') | ||
134 | |||
96 | def _diskstats_is_relevant_line(self, linetokens): | 135 | def _diskstats_is_relevant_line(self, linetokens): |
97 | if len(linetokens) != 14: | 136 | if len(linetokens) < 14: |
98 | return False | 137 | return False |
99 | disk = linetokens[2] | 138 | disk = linetokens[2] |
100 | return self.diskstats_regex.match(disk) | 139 | return self.diskstats_regex.match(disk) |
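Each sample that _reduce_net() appends to reduced_proc_net.log carries one line per interface: the cumulative receive and transmit byte counters followed by the deltas against the previous sample. Hypothetical output:

    # <iface>: <rx_bytes> <tx_bytes> <rx_delta> <tx_delta>
    eth0: 123456789 98765432 4096 512
    lo: 2048 2048 0 0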
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
index 0138b791d4..68c85807d9 100644
--- a/meta/lib/oe/cachedpath.py
+++ b/meta/lib/oe/cachedpath.py
@@ -111,9 +111,13 @@ class CachedPath(object): | |||
111 | return True | 111 | return True |
112 | return False | 112 | return False |
113 | 113 | ||
114 | # WARNING - this is not currently a drop-in replacement since these functions | ||
115 | # return False rather than raising exceptions. | ||
114 | def stat(self, path): | 116 | def stat(self, path): |
115 | return self.callstat(path) | 117 | return self.callstat(path) |
116 | 118 | ||
119 | # WARNING - this is not currently a drop-in replacement since these functions | ||
120 | # return False rather than raising exceptions. | ||
117 | def lstat(self, path): | 121 | def lstat(self, path): |
118 | return self.calllstat(path) | 122 | return self.calllstat(path) |
119 | 123 | ||
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index 5161d33d2d..8ae5d3b715 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -33,7 +33,7 @@ class ClassExtender(object): | |||
33 | name = name.replace("-" + self.extname, "") | 33 | name = name.replace("-" + self.extname, "") |
34 | if name.startswith("virtual/"): | 34 | if name.startswith("virtual/"): |
35 | # Assume large numbers of dashes means a triplet is present and we don't need to convert | 35 | # Assume large numbers of dashes means a triplet is present and we don't need to convert |
36 | if name.count("-") >= 3 and name.endswith(("-go", "-binutils", "-gcc", "-g++")): | 36 | if name.count("-") >= 3 and name.endswith(("-go",)): |
37 | return name | 37 | return name |
38 | subs = name.split("/", 1)[1] | 38 | subs = name.split("/", 1)[1] |
39 | if not subs.startswith(self.extname): | 39 | if not subs.startswith(self.extname): |
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 81abfbf9e2..ced751b835 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -193,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p | |||
193 | else: | 193 | else: |
194 | f.write(line) | 194 | f.write(line) |
195 | invalue = False | 195 | invalue = False |
196 | elif line.startswith('SIGGEN_LOCKEDSIGS'): | 196 | elif line.startswith('SIGGEN_LOCKEDSIGS_t'): |
197 | invalue = True | 197 | invalue = True |
198 | f.write(line) | 198 | f.write(line) |
199 | else: | ||
200 | invalue = False | ||
201 | f.write(line) | ||
199 | 202 | ||
200 | def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): | 203 | def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): |
201 | merged = {} | 204 | merged = {} |
202 | arch_order = [] | 205 | arch_order = [] |
206 | otherdata = [] | ||
203 | with open(lockedsigs_main, 'r') as f: | 207 | with open(lockedsigs_main, 'r') as f: |
204 | invalue = None | 208 | invalue = None |
205 | for line in f: | 209 | for line in f: |
@@ -212,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu | |||
212 | invalue = line[18:].split('=', 1)[0].rstrip() | 216 | invalue = line[18:].split('=', 1)[0].rstrip() |
213 | merged[invalue] = [] | 217 | merged[invalue] = [] |
214 | arch_order.append(invalue) | 218 | arch_order.append(invalue) |
219 | else: | ||
220 | invalue = None | ||
221 | otherdata.append(line) | ||
215 | 222 | ||
216 | with open(lockedsigs_extra, 'r') as f: | 223 | with open(lockedsigs_extra, 'r') as f: |
217 | invalue = None | 224 | invalue = None |
@@ -246,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu | |||
246 | f.write(' "\n') | 253 | f.write(' "\n') |
247 | fulltypes.append(typename) | 254 | fulltypes.append(typename) |
248 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) | 255 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) |
256 | f.write('\n' + ''.join(otherdata)) | ||
249 | 257 | ||
250 | if copy_output: | 258 | if copy_output: |
251 | write_sigs_file(copy_output, list(tocopy.keys()), tocopy) | 259 | write_sigs_file(copy_output, list(tocopy.keys()), tocopy) |
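A quick sketch of the header parsing that merge_lockedsigs() relies on: the signature type is whatever sits between the fixed 18-character "SIGGEN_LOCKEDSIGS_" prefix and the "=". The lockedsigs line below is hypothetical:

    line = 'SIGGEN_LOCKEDSIGS_t-core2-64 = "\\'
    invalue = line[18:].split('=', 1)[0].rstrip()  # len("SIGGEN_LOCKEDSIGS_") == 18
    print(invalue)                                 # t-core2-64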
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index ed5c714cb8..ae194f27cf 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -5,9 +5,11 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | import collections | 7 | import collections |
8 | import re | ||
9 | import itertools | ||
10 | import functools | 8 | import functools |
9 | import itertools | ||
10 | import os.path | ||
11 | import re | ||
12 | import oe.patch | ||
11 | 13 | ||
12 | _Version = collections.namedtuple( | 14 | _Version = collections.namedtuple( |
13 | "_Version", ["release", "patch_l", "pre_l", "pre_v"] | 15 | "_Version", ["release", "patch_l", "pre_l", "pre_v"] |
@@ -71,71 +73,134 @@ def _cmpkey(release, patch_l, pre_l, pre_v): | |||
71 | return _release, _patch, _pre | 73 | return _release, _patch, _pre |
72 | 74 | ||
73 | 75 | ||
74 | def get_patched_cves(d): | 76 | def parse_cve_from_filename(patch_filename): |
75 | """ | 77 | """ |
76 | Get patches that solve CVEs using the "CVE: " tag. | 78 | Parses CVE ID from the filename |
79 | |||
80 | Matches the last "CVE-YYYY-ID" in the file name, also if written | ||
81 | in lowercase. Possible to have multiple CVE IDs in a single | ||
82 | file name, but only the last one will be detected from the file name. | ||
83 | |||
84 | Returns the last CVE ID found in the filename. If no CVE ID is found | ||
85 | an empty string is returned. | ||
77 | """ | 86 | """ |
87 | cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d{4,})", re.IGNORECASE) | ||
78 | 88 | ||
79 | import re | 89 | # Check patch file name for CVE ID |
80 | import oe.patch | 90 | fname_match = cve_file_name_match.search(patch_filename) |
91 | return fname_match.group(1).upper() if fname_match else "" | ||
81 | 92 | ||
82 | cve_match = re.compile(r"CVE:( CVE-\d{4}-\d+)+") | ||
83 | 93 | ||
84 | # Matches the last "CVE-YYYY-ID" in the file name, also if written | 94 | def parse_cves_from_patch_contents(patch_contents): |
85 | # in lowercase. Possible to have multiple CVE IDs in a single | 95 | """ |
86 | # file name, but only the last one will be detected from the file name. | 96 | Parses CVE IDs from patch contents |
87 | # However, patch files contents addressing multiple CVE IDs are supported | ||
88 | # (cve_match regular expression) | ||
89 | cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE) | ||
90 | 97 | ||
91 | patched_cves = set() | 98 | Matches all CVE IDs contained on a line that starts with "CVE: ". Any |
92 | patches = oe.patch.src_patches(d) | 99 | delimiter (',', '&', "and", etc.) can be used without any issues. Multiple |
93 | bb.debug(2, "Scanning %d patches for CVEs" % len(patches)) | 100 | "CVE:" lines can also exist. |
94 | for url in patches: | ||
95 | patch_file = bb.fetch.decodeurl(url)[2] | ||
96 | 101 | ||
97 | # Check patch file name for CVE ID | 102 | Returns a set of all CVE IDs found in the patch contents. |
98 | fname_match = cve_file_name_match.search(patch_file) | 103 | """ |
99 | if fname_match: | 104 | cve_ids = set() |
100 | cve = fname_match.group(1).upper() | 105 | cve_match = re.compile(r"CVE-\d{4}-\d{4,}") |
101 | patched_cves.add(cve) | 106 | # Search for one or more "CVE: " lines |
102 | bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file)) | 107 | for line in patch_contents.split("\n"): |
103 | 108 | if not line.startswith("CVE:"): | |
104 | # Remote patches won't be present and compressed patches won't be | ||
105 | # unpacked, so say we're not scanning them | ||
106 | if not os.path.isfile(patch_file): | ||
107 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
108 | continue | 109 | continue |
110 | cve_ids.update(cve_match.findall(line)) | ||
111 | return cve_ids | ||
112 | |||
109 | 113 | ||
110 | with open(patch_file, "r", encoding="utf-8") as f: | 114 | def parse_cves_from_patch_file(patch_file): |
111 | try: | 115 | """ |
116 | Parses CVE IDs associated with a particular patch file, using both the filename | ||
117 | and patch contents. | ||
118 | |||
119 | Returns a set of all CVE IDs found in the patch filename and contents. | ||
120 | """ | ||
121 | cve_ids = set() | ||
122 | filename_cve = parse_cve_from_filename(patch_file) | ||
123 | if filename_cve: | ||
124 | bb.debug(2, "Found %s from patch file name %s" % (filename_cve, patch_file)) | ||
125 | cve_ids.add(filename_cve) | ||
126 | |||
127 | # Remote patches won't be present and compressed patches won't be | ||
128 | # unpacked, so say we're not scanning them | ||
129 | if not os.path.isfile(patch_file): | ||
130 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
131 | return cve_ids | ||
132 | |||
133 | with open(patch_file, "r", encoding="utf-8") as f: | ||
134 | try: | ||
135 | patch_text = f.read() | ||
136 | except UnicodeDecodeError: | ||
137 | bb.debug( | ||
138 | 1, | ||
139 | "Failed to read patch %s using UTF-8 encoding" | ||
140 | " trying with iso8859-1" % patch_file, | ||
141 | ) | ||
142 | f.close() | ||
143 | with open(patch_file, "r", encoding="iso8859-1") as f: | ||
112 | patch_text = f.read() | 144 | patch_text = f.read() |
113 | except UnicodeDecodeError: | 145 | |
114 | bb.debug(1, "Failed to read patch %s using UTF-8 encoding" | 146 | cve_ids.update(parse_cves_from_patch_contents(patch_text)) |
115 | " trying with iso8859-1" % patch_file) | 147 | |
116 | f.close() | 148 | if not cve_ids: |
117 | with open(patch_file, "r", encoding="iso8859-1") as f: | 149 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) |
118 | patch_text = f.read() | 150 | else: |
119 | 151 | bb.debug(2, "Patch %s solves %s" % (patch_file, ", ".join(sorted(cve_ids)))) | |
120 | # Search for one or more "CVE: " lines | 152 | |
121 | text_match = False | 153 | return cve_ids |
122 | for match in cve_match.finditer(patch_text): | 154 | |
123 | # Get only the CVEs without the "CVE: " tag | 155 | |
124 | cves = patch_text[match.start()+5:match.end()] | 156 | @bb.parse.vardeps("CVE_STATUS") |
125 | for cve in cves.split(): | 157 | def get_patched_cves(d): |
126 | bb.debug(2, "Patch %s solves %s" % (patch_file, cve)) | 158 | """ |
127 | patched_cves.add(cve) | 159 | Determines the CVE IDs that have been solved by either patches included within |
128 | text_match = True | 160 | SRC_URI or by setting CVE_STATUS. |
129 | 161 | ||
130 | if not fname_match and not text_match: | 162 | Returns a dictionary with the CVE IDs as keys and an associated dictionary of |
131 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) | 163 | relevant metadata as the value. |
164 | """ | ||
165 | patched_cves = {} | ||
166 | patches = oe.patch.src_patches(d) | ||
167 | bb.debug(2, "Scanning %d patches for CVEs" % len(patches)) | ||
168 | |||
169 | # Check each patch file | ||
170 | for url in patches: | ||
171 | patch_file = bb.fetch.decodeurl(url)[2] | ||
172 | for cve_id in parse_cves_from_patch_file(patch_file): | ||
173 | if cve_id not in patched_cves: | ||
174 | patched_cves[cve_id] = { | ||
175 | "abbrev-status": "Patched", | ||
176 | "status": "fix-file-included", | ||
177 | "resource": [patch_file], | ||
178 | } | ||
179 | else: | ||
180 | patched_cves[cve_id]["resource"].append(patch_file) | ||
132 | 181 | ||
133 | # Search for additional patched CVEs | 182 | # Search for additional patched CVEs |
134 | for cve in (d.getVarFlags("CVE_STATUS") or {}): | 183 | for cve_id in d.getVarFlags("CVE_STATUS") or {}: |
135 | decoded_status, _, _ = decode_cve_status(d, cve) | 184 | decoded_status = decode_cve_status(d, cve_id) |
136 | if decoded_status == "Patched": | 185 | products = d.getVar("CVE_PRODUCT") |
137 | bb.debug(2, "CVE %s is additionally patched" % cve) | 186 | if has_cve_product_match(decoded_status, products): |
138 | patched_cves.add(cve) | 187 | if cve_id in patched_cves: |
188 | bb.warn( | ||
189 | 'CVE_STATUS[%s] = "%s" is overwriting previous status of "%s: %s"' | ||
190 | % ( | ||
191 | cve_id, | ||
192 | d.getVarFlag("CVE_STATUS", cve_id), | ||
193 | patched_cves[cve_id]["abbrev-status"], | ||
194 | patched_cves[cve_id]["status"], | ||
195 | ) | ||
196 | ) | ||
197 | patched_cves[cve_id] = { | ||
198 | "abbrev-status": decoded_status["mapping"], | ||
199 | "status": decoded_status["detail"], | ||
200 | "justification": decoded_status["description"], | ||
201 | "affected-vendor": decoded_status["vendor"], | ||
202 | "affected-product": decoded_status["product"], | ||
203 | } | ||
139 | 204 | ||
140 | return patched_cves | 205 | return patched_cves |
141 | 206 | ||
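The two helpers split out above are easy to exercise standalone; the file name and patch contents below are hypothetical:

    from oe.cve_check import parse_cve_from_filename, parse_cves_from_patch_contents

    parse_cve_from_filename("0002-backport-cve-2021-99999-and-CVE-2024-1234.patch")
    # -> 'CVE-2024-1234' (only the last ID in the name is matched)

    parse_cves_from_patch_contents("CVE: CVE-2023-0001 & CVE-2023-0002\n")
    # -> {'CVE-2023-0001', 'CVE-2023-0002'}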
@@ -225,21 +290,89 @@ def convert_cve_version(version): | |||
225 | 290 | ||
226 | return version + update | 291 | return version + update |
227 | 292 | ||
293 | @bb.parse.vardeps("CVE_STATUS", "CVE_CHECK_STATUSMAP") | ||
228 | def decode_cve_status(d, cve): | 294 | def decode_cve_status(d, cve): |
229 | """ | 295 | """ |
230 | Convert CVE_STATUS into status, detail and description. | 296 | Convert CVE_STATUS into status, vendor, product, detail and description. |
231 | """ | 297 | """ |
232 | status = d.getVarFlag("CVE_STATUS", cve) | 298 | status = d.getVarFlag("CVE_STATUS", cve) |
233 | if not status: | 299 | if not status: |
234 | return ("", "", "") | 300 | return {} |
301 | |||
302 | status_split = status.split(':', 4) | ||
303 | status_out = {} | ||
304 | status_out["detail"] = status_split[0] | ||
305 | product = "*" | ||
306 | vendor = "*" | ||
307 | description = "" | ||
308 | if len(status_split) >= 5 and status_split[1].strip() == "cpe": | ||
309 | # Both vendor and product are mandatory if cpe: present, the syntax is then: | ||
310 | # detail: cpe:vendor:product:description | ||
311 | vendor = status_split[2].strip() | ||
312 | product = status_split[3].strip() | ||
313 | description = status_split[4].strip() | ||
314 | elif len(status_split) >= 2 and status_split[1].strip() == "cpe": | ||
315 | # Malformed CPE | ||
316 | bb.warn( | ||
317 | 'Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE' | ||
318 | % (cve, status) | ||
319 | ) | ||
320 | else: | ||
321 | # Other case: no CPE, the syntax is then: | ||
322 | # detail: description | ||
323 | description = status.split(':', 1)[1].strip() if (len(status_split) > 1) else "" | ||
235 | 324 | ||
236 | status_split = status.split(':', 1) | 325 | status_out["vendor"] = vendor |
237 | detail = status_split[0] | 326 | status_out["product"] = product |
238 | description = status_split[1].strip() if (len(status_split) > 1) else "" | 327 | status_out["description"] = description |
239 | 328 | ||
329 | detail = status_out["detail"] | ||
240 | status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail) | 330 | status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail) |
241 | if status_mapping is None: | 331 | if status_mapping is None: |
242 | bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status)) | 332 | bb.warn( |
333 | 'Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' | ||
334 | % (detail, cve, status) | ||
335 | ) | ||
243 | status_mapping = "Unpatched" | 336 | status_mapping = "Unpatched" |
337 | status_out["mapping"] = status_mapping | ||
338 | |||
339 | return status_out | ||
244 | 340 | ||
245 | return (status_mapping, detail, description) | 341 | def has_cve_product_match(detailed_status, products): |
342 | """ | ||
343 | Check product/vendor match between detailed_status from decode_cve_status and a string of | ||
344 | products (like from CVE_PRODUCT) | ||
345 | """ | ||
346 | for product in products.split(): | ||
347 | vendor = "*" | ||
348 | if ":" in product: | ||
349 | vendor, product = product.split(":", 1) | ||
350 | |||
351 | if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \ | ||
352 | (product == detailed_status["product"] or detailed_status["product"] == "*"): | ||
353 | return True | ||
354 | |||
355 | # If no match, return False | ||
356 | return False | ||
357 | |||
358 | def extend_cve_status(d): | ||
359 | # do this only once in case multiple classes use this | ||
360 | if d.getVar("CVE_STATUS_EXTENDED"): | ||
361 | return | ||
362 | d.setVar("CVE_STATUS_EXTENDED", "1") | ||
363 | |||
364 | # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS | ||
365 | cve_check_ignore = d.getVar("CVE_CHECK_IGNORE") | ||
366 | if cve_check_ignore: | ||
367 | bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS") | ||
368 | for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split(): | ||
369 | d.setVarFlag("CVE_STATUS", cve, "ignored") | ||
370 | |||
371 | # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once | ||
372 | for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split(): | ||
373 | cve_group = d.getVar(cve_status_group) | ||
374 | if cve_group is not None: | ||
375 | for cve in cve_group.split(): | ||
376 | d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status")) | ||
377 | else: | ||
378 | bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group) | ||
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py
index eab2349a4f..9794453092 100644
--- a/meta/lib/oe/elf.py
+++ b/meta/lib/oe/elf.py
@@ -5,141 +5,144 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | def machine_dict(d): | 7 | def machine_dict(d): |
8 | # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? | 8 | # Generating this data is slow, so cache it |
9 | machdata = { | 9 | if not hasattr(machine_dict, "machdata"): |
10 | "darwin9" : { | 10 | machine_dict.machdata = { |
11 | "arm" : (40, 0, 0, True, 32), | 11 | # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? |
12 | }, | 12 | "darwin9" : { |
13 | "eabi" : { | 13 | "arm" : (40, 0, 0, True, 32), |
14 | "arm" : (40, 0, 0, True, 32), | 14 | }, |
15 | }, | 15 | "eabi" : { |
16 | "elf" : { | 16 | "arm" : (40, 0, 0, True, 32), |
17 | "aarch64" : (183, 0, 0, True, 64), | 17 | }, |
18 | "aarch64_be" :(183, 0, 0, False, 64), | 18 | "elf" : { |
19 | "i586" : (3, 0, 0, True, 32), | 19 | "aarch64" : (183, 0, 0, True, 64), |
20 | "i686" : (3, 0, 0, True, 32), | 20 | "aarch64_be" :(183, 0, 0, False, 64), |
21 | "x86_64": (62, 0, 0, True, 64), | 21 | "i586" : (3, 0, 0, True, 32), |
22 | "epiphany": (4643, 0, 0, True, 32), | 22 | "i686" : (3, 0, 0, True, 32), |
23 | "lm32": (138, 0, 0, False, 32), | 23 | "x86_64": (62, 0, 0, True, 64), |
24 | "loongarch64":(258, 0, 0, True, 64), | 24 | "epiphany": (4643, 0, 0, True, 32), |
25 | "mips": ( 8, 0, 0, False, 32), | 25 | "lm32": (138, 0, 0, False, 32), |
26 | "mipsel": ( 8, 0, 0, True, 32), | 26 | "loongarch64":(258, 0, 0, True, 64), |
27 | "microblaze": (189, 0, 0, False, 32), | 27 | "mips": ( 8, 0, 0, False, 32), |
28 | "microblazeel":(189, 0, 0, True, 32), | 28 | "mipsel": ( 8, 0, 0, True, 32), |
29 | "powerpc": (20, 0, 0, False, 32), | 29 | "microblaze": (189, 0, 0, False, 32), |
30 | "riscv32": (243, 0, 0, True, 32), | 30 | "microblazeel":(189, 0, 0, True, 32), |
31 | "riscv64": (243, 0, 0, True, 64), | 31 | "powerpc": (20, 0, 0, False, 32), |
32 | }, | 32 | "riscv32": (243, 0, 0, True, 32), |
33 | "linux" : { | 33 | "riscv64": (243, 0, 0, True, 64), |
34 | "aarch64" : (183, 0, 0, True, 64), | 34 | }, |
35 | "aarch64_be" :(183, 0, 0, False, 64), | 35 | "linux" : { |
36 | "arm" : (40, 97, 0, True, 32), | 36 | "aarch64" : (183, 0, 0, True, 64), |
37 | "armeb": (40, 97, 0, False, 32), | 37 | "aarch64_be" :(183, 0, 0, False, 64), |
38 | "powerpc": (20, 0, 0, False, 32), | 38 | "arm" : (40, 97, 0, True, 32), |
39 | "powerpc64": (21, 0, 0, False, 64), | 39 | "armeb": (40, 97, 0, False, 32), |
40 | "powerpc64le": (21, 0, 0, True, 64), | 40 | "powerpc": (20, 0, 0, False, 32), |
41 | "i386": ( 3, 0, 0, True, 32), | 41 | "powerpc64": (21, 0, 0, False, 64), |
42 | "i486": ( 3, 0, 0, True, 32), | 42 | "powerpc64le": (21, 0, 0, True, 64), |
43 | "i586": ( 3, 0, 0, True, 32), | 43 | "i386": ( 3, 0, 0, True, 32), |
44 | "i686": ( 3, 0, 0, True, 32), | 44 | "i486": ( 3, 0, 0, True, 32), |
45 | "x86_64": (62, 0, 0, True, 64), | 45 | "i586": ( 3, 0, 0, True, 32), |
46 | "ia64": (50, 0, 0, True, 64), | 46 | "i686": ( 3, 0, 0, True, 32), |
47 | "alpha": (36902, 0, 0, True, 64), | 47 | "x86_64": (62, 0, 0, True, 64), |
48 | "hppa": (15, 3, 0, False, 32), | 48 | "ia64": (50, 0, 0, True, 64), |
49 | "loongarch64":(258, 0, 0, True, 64), | 49 | "alpha": (36902, 0, 0, True, 64), |
50 | "m68k": ( 4, 0, 0, False, 32), | 50 | "hppa": (15, 3, 0, False, 32), |
51 | "mips": ( 8, 0, 0, False, 32), | 51 | "loongarch64":(258, 0, 0, True, 64), |
52 | "mipsel": ( 8, 0, 0, True, 32), | 52 | "m68k": ( 4, 0, 0, False, 32), |
53 | "mips64": ( 8, 0, 0, False, 64), | 53 | "mips": ( 8, 0, 0, False, 32), |
54 | "mips64el": ( 8, 0, 0, True, 64), | 54 | "mipsel": ( 8, 0, 0, True, 32), |
55 | "mipsisa32r6": ( 8, 0, 0, False, 32), | 55 | "mips64": ( 8, 0, 0, False, 64), |
56 | "mipsisa32r6el": ( 8, 0, 0, True, 32), | 56 | "mips64el": ( 8, 0, 0, True, 64), |
57 | "mipsisa64r6": ( 8, 0, 0, False, 64), | 57 | "mipsisa32r6": ( 8, 0, 0, False, 32), |
58 | "mipsisa64r6el": ( 8, 0, 0, True, 64), | 58 | "mipsisa32r6el": ( 8, 0, 0, True, 32), |
59 | "nios2": (113, 0, 0, True, 32), | 59 | "mipsisa64r6": ( 8, 0, 0, False, 64), |
60 | "riscv32": (243, 0, 0, True, 32), | 60 | "mipsisa64r6el": ( 8, 0, 0, True, 64), |
61 | "riscv64": (243, 0, 0, True, 64), | 61 | "nios2": (113, 0, 0, True, 32), |
62 | "s390": (22, 0, 0, False, 32), | 62 | "riscv32": (243, 0, 0, True, 32), |
63 | "sh4": (42, 0, 0, True, 32), | 63 | "riscv64": (243, 0, 0, True, 64), |
64 | "sparc": ( 2, 0, 0, False, 32), | 64 | "s390": (22, 0, 0, False, 32), |
65 | "microblaze": (189, 0, 0, False, 32), | 65 | "sh4": (42, 0, 0, True, 32), |
66 | "microblazeel":(189, 0, 0, True, 32), | 66 | "sparc": ( 2, 0, 0, False, 32), |
67 | }, | 67 | "microblaze": (189, 0, 0, False, 32), |
68 | "linux-android" : { | 68 | "microblazeel":(189, 0, 0, True, 32), |
69 | "aarch64" : (183, 0, 0, True, 64), | 69 | }, |
70 | "i686": ( 3, 0, 0, True, 32), | 70 | "linux-android" : { |
71 | "x86_64": (62, 0, 0, True, 64), | 71 | "aarch64" : (183, 0, 0, True, 64), |
72 | }, | 72 | "i686": ( 3, 0, 0, True, 32), |
73 | "linux-androideabi" : { | 73 | "x86_64": (62, 0, 0, True, 64), |
74 | "arm" : (40, 97, 0, True, 32), | 74 | }, |
75 | }, | 75 | "linux-androideabi" : { |
76 | "linux-musl" : { | 76 | "arm" : (40, 97, 0, True, 32), |
77 | "aarch64" : (183, 0, 0, True, 64), | 77 | }, |
78 | "aarch64_be" :(183, 0, 0, False, 64), | 78 | "linux-musl" : { |
79 | "arm" : ( 40, 97, 0, True, 32), | 79 | "aarch64" : (183, 0, 0, True, 64), |
80 | "armeb": ( 40, 97, 0, False, 32), | 80 | "aarch64_be" :(183, 0, 0, False, 64), |
81 | "powerpc": ( 20, 0, 0, False, 32), | 81 | "arm" : ( 40, 97, 0, True, 32), |
82 | "powerpc64": ( 21, 0, 0, False, 64), | 82 | "armeb": ( 40, 97, 0, False, 32), |
83 | "powerpc64le": (21, 0, 0, True, 64), | 83 | "powerpc": ( 20, 0, 0, False, 32), |
84 | "i386": ( 3, 0, 0, True, 32), | 84 | "powerpc64": ( 21, 0, 0, False, 64), |
85 | "i486": ( 3, 0, 0, True, 32), | 85 | "powerpc64le": (21, 0, 0, True, 64), |
86 | "i586": ( 3, 0, 0, True, 32), | 86 | "i386": ( 3, 0, 0, True, 32), |
87 | "i686": ( 3, 0, 0, True, 32), | 87 | "i486": ( 3, 0, 0, True, 32), |
88 | "x86_64": ( 62, 0, 0, True, 64), | 88 | "i586": ( 3, 0, 0, True, 32), |
89 | "mips": ( 8, 0, 0, False, 32), | 89 | "i686": ( 3, 0, 0, True, 32), |
90 | "mipsel": ( 8, 0, 0, True, 32), | 90 | "x86_64": ( 62, 0, 0, True, 64), |
91 | "mips64": ( 8, 0, 0, False, 64), | 91 | "loongarch64":( 258, 0, 0, True, 64), |
92 | "mips64el": ( 8, 0, 0, True, 64), | 92 | "mips": ( 8, 0, 0, False, 32), |
93 | "microblaze": (189, 0, 0, False, 32), | 93 | "mipsel": ( 8, 0, 0, True, 32), |
94 | "microblazeel":(189, 0, 0, True, 32), | 94 | "mips64": ( 8, 0, 0, False, 64), |
95 | "riscv32": (243, 0, 0, True, 32), | 95 | "mips64el": ( 8, 0, 0, True, 64), |
96 | "riscv64": (243, 0, 0, True, 64), | 96 | "microblaze": (189, 0, 0, False, 32), |
97 | "sh4": ( 42, 0, 0, True, 32), | 97 | "microblazeel":(189, 0, 0, True, 32), |
98 | }, | 98 | "riscv32": (243, 0, 0, True, 32), |
99 | "uclinux-uclibc" : { | 99 | "riscv64": (243, 0, 0, True, 64), |
100 | "bfin": ( 106, 0, 0, True, 32), | 100 | "sh4": ( 42, 0, 0, True, 32), |
101 | }, | 101 | }, |
102 | "linux-gnueabi" : { | 102 | "uclinux-uclibc" : { |
103 | "arm" : (40, 0, 0, True, 32), | 103 | "bfin": ( 106, 0, 0, True, 32), |
104 | "armeb" : (40, 0, 0, False, 32), | 104 | }, |
105 | }, | 105 | "linux-gnueabi" : { |
106 | "linux-musleabi" : { | 106 | "arm" : (40, 0, 0, True, 32), |
107 | "arm" : (40, 0, 0, True, 32), | 107 | "armeb" : (40, 0, 0, False, 32), |
108 | "armeb" : (40, 0, 0, False, 32), | 108 | }, |
109 | }, | 109 | "linux-musleabi" : { |
110 | "linux-gnuspe" : { | 110 | "arm" : (40, 0, 0, True, 32), |
111 | "powerpc": (20, 0, 0, False, 32), | 111 | "armeb" : (40, 0, 0, False, 32), |
112 | }, | 112 | }, |
113 | "linux-muslspe" : { | 113 | "linux-gnuspe" : { |
114 | "powerpc": (20, 0, 0, False, 32), | 114 | "powerpc": (20, 0, 0, False, 32), |
115 | }, | 115 | }, |
116 | "linux-gnu" : { | 116 | "linux-muslspe" : { |
117 | "powerpc": (20, 0, 0, False, 32), | 117 | "powerpc": (20, 0, 0, False, 32), |
118 | "sh4": (42, 0, 0, True, 32), | 118 | }, |
119 | }, | 119 | "linux-gnu" : { |
120 | "linux-gnu_ilp32" : { | 120 | "powerpc": (20, 0, 0, False, 32), |
121 | "aarch64" : (183, 0, 0, True, 32), | 121 | "sh4": (42, 0, 0, True, 32), |
122 | }, | 122 | }, |
123 | "linux-gnux32" : { | 123 | "linux-gnu_ilp32" : { |
124 | "x86_64": (62, 0, 0, True, 32), | 124 | "aarch64" : (183, 0, 0, True, 32), |
125 | }, | 125 | }, |
126 | "linux-muslx32" : { | 126 | "linux-gnux32" : { |
127 | "x86_64": (62, 0, 0, True, 32), | 127 | "x86_64": (62, 0, 0, True, 32), |
128 | }, | 128 | }, |
129 | "linux-gnun32" : { | 129 | "linux-muslx32" : { |
130 | "mips64": ( 8, 0, 0, False, 32), | 130 | "x86_64": (62, 0, 0, True, 32), |
131 | "mips64el": ( 8, 0, 0, True, 32), | 131 | }, |
132 | "mipsisa64r6": ( 8, 0, 0, False, 32), | 132 | "linux-gnun32" : { |
133 | "mipsisa64r6el":( 8, 0, 0, True, 32), | 133 | "mips64": ( 8, 0, 0, False, 32), |
134 | }, | 134 | "mips64el": ( 8, 0, 0, True, 32), |
135 | } | 135 | "mipsisa64r6": ( 8, 0, 0, False, 32), |
136 | "mipsisa64r6el":( 8, 0, 0, True, 32), | ||
137 | }, | ||
138 | } | ||
136 | 139 | ||
137 | # Add in any extra user supplied data which may come from a BSP layer, removing the | 140 | # Add in any extra user supplied data which may come from a BSP layer, removing the |
138 | # need to always change this class directly | 141 | # need to always change this class directly |
139 | extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS" or None) or "").split() | 142 | extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS" or None) or "").split() |
140 | for m in extra_machdata: | 143 | for m in extra_machdata: |
141 | call = m + "(machdata, d)" | 144 | call = m + "(machdata, d)" |
142 | locs = { "machdata" : machdata, "d" : d} | 145 | locs = { "machdata" : machine_dict.machdata, "d" : d} |
143 | machdata = bb.utils.better_eval(call, locs) | 146 | machine_dict.machdata = bb.utils.better_eval(call, locs) |
144 | 147 | ||
145 | return machdata | 148 | return machine_dict.machdata |
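A lookup sketch; repeated calls now return the cached table. Passing None for d is fine when no extra BSP machine data is needed:

    import oe.elf

    machdata = oe.elf.machine_dict(None)
    elf_machine, osabi, abiversion, little_endian, bits = machdata["linux"]["x86_64"]
    # -> (62, 0, 0, True, 64): EM_X86_64, default OSABI, little endian, 64-bit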
diff --git a/meta/lib/oe/fitimage.py b/meta/lib/oe/fitimage.py
new file mode 100644
index 0000000000..f303799155
--- /dev/null
+++ b/meta/lib/oe/fitimage.py
@@ -0,0 +1,547 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | # This file contains common functions for the fitimage generation | ||
7 | |||
8 | import os | ||
9 | import shlex | ||
10 | import subprocess | ||
11 | import bb | ||
12 | |||
13 | from oeqa.utils.commands import runCmd | ||
14 | |||
15 | class ItsNode: | ||
16 | INDENT_SIZE = 8 | ||
17 | |||
18 | def __init__(self, name, parent_node, sub_nodes=None, properties=None): | ||
19 | self.name = name | ||
20 | self.parent_node = parent_node | ||
21 | |||
22 | self.sub_nodes = [] | ||
23 | if sub_nodes: | ||
24 | self.sub_nodes = sub_nodes | ||
25 | |||
26 | self.properties = {} | ||
27 | if properties: | ||
28 | self.properties = properties | ||
29 | |||
30 | if parent_node: | ||
31 | parent_node.add_sub_node(self) | ||
32 | |||
33 | def add_sub_node(self, sub_node): | ||
34 | self.sub_nodes.append(sub_node) | ||
35 | |||
36 | def add_property(self, key, value): | ||
37 | self.properties[key] = value | ||
38 | |||
39 | def emit(self, f, indent): | ||
40 | indent_str_name = " " * indent | ||
41 | indent_str_props = " " * (indent + self.INDENT_SIZE) | ||
42 | f.write("%s%s {\n" % (indent_str_name, self.name)) | ||
43 | for key, value in self.properties.items(): | ||
44 | bb.debug(1, "key: %s, value: %s" % (key, str(value))) | ||
45 | # Single integer: <0x12ab> | ||
46 | if isinstance(value, int): | ||
47 | f.write(indent_str_props + key + ' = <0x%x>;\n' % value) | ||
48 | # list of strings: "string1", "string2" or integers: <0x12ab 0x34cd> | ||
49 | elif isinstance(value, list): | ||
50 | if len(value) == 0: | ||
51 | f.write(indent_str_props + key + ' = "";\n') | ||
52 | elif isinstance(value[0], int): | ||
53 | list_entries = ' '.join('0x%x' % entry for entry in value) | ||
54 | f.write(indent_str_props + key + ' = <%s>;\n' % list_entries) | ||
55 | else: | ||
56 | list_entries = ', '.join('"%s"' % entry for entry in value) | ||
57 | f.write(indent_str_props + key + ' = %s;\n' % list_entries) | ||
58 | elif isinstance(value, str): | ||
59 | # path: /incbin/("path/to/file") | ||
60 | if key in ["data"] and value.startswith('/incbin/('): | ||
61 | f.write(indent_str_props + key + ' = %s;\n' % value) | ||
62 | # Integers which are already string formatted | ||
63 | elif value.startswith("<") and value.endswith(">"): | ||
64 | f.write(indent_str_props + key + ' = %s;\n' % value) | ||
65 | else: | ||
66 | f.write(indent_str_props + key + ' = "%s";\n' % value) | ||
67 | else: | ||
68 | bb.fatal("%s has unexpected data type." % str(value)) | ||
69 | for sub_node in self.sub_nodes: | ||
70 | sub_node.emit(f, indent + self.INDENT_SIZE) | ||
71 | f.write(indent_str_name + '};\n') | ||
72 | |||
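ItsNode.emit() above serializes the node tree recursively into device-tree source syntax, picking the property encoding from the Python type (int, list or str). A minimal sketch of the rendering, assuming a BitBake environment where this module and bb are importable; names and values are illustrative only:

    import sys

    root = ItsNode("/", None, properties={"description": "demo", "#address-cells": "<1>"})
    images = ItsNode("images", root)
    images.add_property("load", 0x80000000)      # int  -> load = <0x80000000>;
    images.add_property("loadables", ["fdt-1"])  # list -> loadables = "fdt-1";
    root.emit(sys.stdout, 0)                     # prints the nested ITS blocks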
73 | class ItsNodeImages(ItsNode): | ||
74 | def __init__(self, parent_node): | ||
75 | super().__init__("images", parent_node) | ||
76 | |||
77 | class ItsNodeConfigurations(ItsNode): | ||
78 | def __init__(self, parent_node): | ||
79 | super().__init__("configurations", parent_node) | ||
80 | |||
81 | class ItsNodeHash(ItsNode): | ||
82 | def __init__(self, name, parent_node, algo, opt_props=None): | ||
83 | properties = { | ||
84 | "algo": algo | ||
85 | } | ||
86 | if opt_props: | ||
87 | properties.update(opt_props) | ||
88 | super().__init__(name, parent_node, None, properties) | ||
89 | |||
90 | class ItsImageSignature(ItsNode): | ||
91 | def __init__(self, name, parent_node, algo, keyname, opt_props=None): | ||
92 | properties = { | ||
93 | "algo": algo, | ||
94 | "key-name-hint": keyname | ||
95 | } | ||
96 | if opt_props: | ||
97 | properties.update(opt_props) | ||
98 | super().__init__(name, parent_node, None, properties) | ||
99 | |||
100 | class ItsNodeImage(ItsNode): | ||
101 | def __init__(self, name, parent_node, description, type, compression, sub_nodes=None, opt_props=None): | ||
102 | properties = { | ||
103 | "description": description, | ||
104 | "type": type, | ||
105 | "compression": compression, | ||
106 | } | ||
107 | if opt_props: | ||
108 | properties.update(opt_props) | ||
109 | super().__init__(name, parent_node, sub_nodes, properties) | ||
110 | |||
111 | class ItsNodeDtb(ItsNodeImage): | ||
112 | def __init__(self, name, parent_node, description, type, compression, | ||
113 | sub_nodes=None, opt_props=None, compatible=None): | ||
114 | super().__init__(name, parent_node, description, type, compression, sub_nodes, opt_props) | ||
115 | self.compatible = compatible | ||
116 | |||
117 | class ItsNodeDtbAlias(ItsNode): | ||
118 | """Additional Configuration Node for a DTB | ||
119 | |||
120 | Symlinks pointing to a DTB file are handled by an additional | ||
121 | configuration node referring to another DTB image node. | ||
122 | """ | ||
123 | def __init__(self, name, alias_name, compatible=None): | ||
124 | super().__init__(name, parent_node=None, sub_nodes=None, properties=None) | ||
125 | self.alias_name = alias_name | ||
126 | self.compatible = compatible | ||
127 | |||
128 | class ItsNodeConfigurationSignature(ItsNode): | ||
129 | def __init__(self, name, parent_node, algo, keyname, opt_props=None): | ||
130 | properties = { | ||
131 | "algo": algo, | ||
132 | "key-name-hint": keyname | ||
133 | } | ||
134 | if opt_props: | ||
135 | properties.update(opt_props) | ||
136 | super().__init__(name, parent_node, None, properties) | ||
137 | |||
138 | class ItsNodeConfiguration(ItsNode): | ||
139 | def __init__(self, name, parent_node, description, sub_nodes=None, opt_props=None): | ||
140 | properties = { | ||
141 | "description": description, | ||
142 | } | ||
143 | if opt_props: | ||
144 | properties.update(opt_props) | ||
145 | super().__init__(name, parent_node, sub_nodes, properties) | ||
146 | |||
147 | class ItsNodeRootKernel(ItsNode): | ||
148 | """Create FIT images for the kernel | ||
149 | |||
150 | Currently exactly one kernel (no more, no less) can be added to the FIT | ||
151 | image, along with zero or more device trees and zero or one ramdisk. | ||
152 | |||
153 | If a device tree is included in the FIT image, the default configuration is | ||
154 | the first DTB. If no DTB is present, the default configuration is the kernel. | ||
155 | """ | ||
156 | def __init__(self, description, address_cells, host_prefix, arch, conf_prefix, | ||
157 | sign_enable=False, sign_keydir=None, | ||
158 | mkimage=None, mkimage_dtcopts=None, | ||
159 | mkimage_sign=None, mkimage_sign_args=None, | ||
160 | hash_algo=None, sign_algo=None, pad_algo=None, | ||
161 | sign_keyname_conf=None, | ||
162 | sign_individual=False, sign_keyname_img=None): | ||
163 | props = { | ||
164 | "description": description, | ||
165 | "#address-cells": f"<{address_cells}>" | ||
166 | } | ||
167 | super().__init__("/", None, None, props) | ||
168 | self.images = ItsNodeImages(self) | ||
169 | self.configurations = ItsNodeConfigurations(self) | ||
170 | |||
171 | self._host_prefix = host_prefix | ||
172 | self._arch = arch | ||
173 | self._conf_prefix = conf_prefix | ||
174 | |||
175 | # Signature related properties | ||
176 | self._sign_enable = sign_enable | ||
177 | self._sign_keydir = sign_keydir | ||
178 | self._mkimage = mkimage | ||
179 | self._mkimage_dtcopts = mkimage_dtcopts | ||
180 | self._mkimage_sign = mkimage_sign | ||
181 | self._mkimage_sign_args = mkimage_sign_args | ||
182 | self._hash_algo = hash_algo | ||
183 | self._sign_algo = sign_algo | ||
184 | self._pad_algo = pad_algo | ||
185 | self._sign_keyname_conf = sign_keyname_conf | ||
186 | self._sign_individual = sign_individual | ||
187 | self._sign_keyname_img = sign_keyname_img | ||
188 | self._sanitize_sign_config() | ||
189 | |||
190 | self._dtbs = [] | ||
191 | self._dtb_alias = [] | ||
192 | self._kernel = None | ||
193 | self._ramdisk = None | ||
194 | self._bootscr = None | ||
195 | self._setup = None | ||
196 | |||
197 | def _sanitize_sign_config(self): | ||
198 | if self._sign_enable: | ||
199 | if not self._hash_algo: | ||
200 | bb.fatal("FIT image signing is enabled but no hash algorithm is provided.") | ||
201 | if not self._sign_algo: | ||
202 | bb.fatal("FIT image signing is enabled but no signature algorithm is provided.") | ||
203 | if not self._pad_algo: | ||
204 | bb.fatal("FIT image signing is enabled but no padding algorithm is provided.") | ||
205 | if not self._sign_keyname_conf: | ||
206 | bb.fatal("FIT image signing is enabled but no configuration key name is provided.") | ||
207 | if self._sign_individual and not self._sign_keyname_img: | ||
208 | bb.fatal("FIT image signing is enabled for individual images but no image key name is provided.") | ||
209 | |||
210 | def write_its_file(self, itsfile): | ||
211 | with open(itsfile, 'w') as f: | ||
212 | f.write("/dts-v1/;\n\n") | ||
213 | self.emit(f, 0) | ||
214 | |||
215 | def its_add_node_image(self, image_id, description, image_type, compression, opt_props): | ||
216 | image_node = ItsNodeImage( | ||
217 | image_id, | ||
218 | self.images, | ||
219 | description, | ||
220 | image_type, | ||
221 | compression, | ||
222 | opt_props=opt_props | ||
223 | ) | ||
224 | if self._hash_algo: | ||
225 | ItsNodeHash( | ||
226 | "hash-1", | ||
227 | image_node, | ||
228 | self._hash_algo | ||
229 | ) | ||
230 | if self._sign_individual: | ||
231 | ItsImageSignature( | ||
232 | "signature-1", | ||
233 | image_node, | ||
234 | f"{self._hash_algo},{self._sign_algo}", | ||
235 | self._sign_keyname_img | ||
236 | ) | ||
237 | return image_node | ||
238 | |||
239 | def its_add_node_dtb(self, image_id, description, image_type, compression, opt_props, compatible): | ||
240 | dtb_node = ItsNodeDtb( | ||
241 | image_id, | ||
242 | self.images, | ||
243 | description, | ||
244 | image_type, | ||
245 | compression, | ||
246 | opt_props=opt_props, | ||
247 | compatible=compatible | ||
248 | ) | ||
249 | if self._hash_algo: | ||
250 | ItsNodeHash( | ||
251 | "hash-1", | ||
252 | dtb_node, | ||
253 | self._hash_algo | ||
254 | ) | ||
255 | if self._sign_individual: | ||
256 | ItsImageSignature( | ||
257 | "signature-1", | ||
258 | dtb_node, | ||
259 | f"{self._hash_algo},{self._sign_algo}", | ||
260 | self._sign_keyname_img | ||
261 | ) | ||
262 | return dtb_node | ||
263 | |||
264 | def fitimage_emit_section_kernel(self, kernel_id, kernel_path, compression, | ||
265 | load, entrypoint, mkimage_kernel_type, entrysymbol=None): | ||
266 | """Emit the fitImage ITS kernel section""" | ||
267 | if self._kernel: | ||
268 | bb.fatal("Kernel section already exists in the ITS file.") | ||
269 | if entrysymbol: | ||
270 | result = subprocess.run([self._host_prefix + "nm", "vmlinux"], capture_output=True, text=True) | ||
271 | for line in result.stdout.splitlines(): | ||
272 | parts = line.split() | ||
273 | if len(parts) == 3 and parts[2] == entrysymbol: | ||
274 | entrypoint = "<0x%s>" % parts[0] | ||
275 | break | ||
276 | kernel_node = self.its_add_node_image( | ||
277 | kernel_id, | ||
278 | "Linux kernel", | ||
279 | mkimage_kernel_type, | ||
280 | compression, | ||
281 | { | ||
282 | "data": '/incbin/("' + kernel_path + '")', | ||
283 | "arch": self._arch, | ||
284 | "os": "linux", | ||
285 | "load": f"<{load}>", | ||
286 | "entry": f"<{entrypoint}>" | ||
287 | } | ||
288 | ) | ||
289 | self._kernel = kernel_node | ||
290 | |||
291 | def fitimage_emit_section_dtb(self, dtb_id, dtb_path, dtb_loadaddress=None, | ||
292 | dtbo_loadaddress=None, add_compatible=False): | ||
293 | """Emit the fitImage ITS DTB section""" | ||
294 | load=None | ||
295 | dtb_ext = os.path.splitext(dtb_path)[1] | ||
296 | if dtb_ext == ".dtbo": | ||
297 | if dtbo_loadaddress: | ||
298 | load = dtbo_loadaddress | ||
299 | elif dtb_loadaddress: | ||
300 | load = dtb_loadaddress | ||
301 | |||
302 | opt_props = { | ||
303 | "data": '/incbin/("' + dtb_path + '")', | ||
304 | "arch": self._arch | ||
305 | } | ||
306 | if load: | ||
307 | opt_props["load"] = f"<{load}>" | ||
308 | |||
309 | # Preserve the DTB's compatible string to be added to the configuration node | ||
310 | compatible = None | ||
311 | if add_compatible: | ||
312 | compatible = get_compatible_from_dtb(dtb_path) | ||
313 | |||
314 | dtb_node = self.its_add_node_dtb( | ||
315 | "fdt-" + dtb_id, | ||
316 | "Flattened Device Tree blob", | ||
317 | "flat_dt", | ||
318 | "none", | ||
319 | opt_props, | ||
320 | compatible | ||
321 | ) | ||
322 | self._dtbs.append(dtb_node) | ||
323 | |||
324 | def fitimage_emit_section_dtb_alias(self, dtb_alias_id, dtb_path, add_compatible=False): | ||
325 | """Add a configuration node referring to another DTB""" | ||
326 | # Preserve the DTB's compatible string to be added to the configuration node | ||
327 | compatible = None | ||
328 | if add_compatible: | ||
329 | compatible = get_compatible_from_dtb(dtb_path) | ||
330 | |||
331 | dtb_id = os.path.basename(dtb_path) | ||
332 | dtb_alias_node = ItsNodeDtbAlias("fdt-" + dtb_id, dtb_alias_id, compatible) | ||
333 | self._dtb_alias.append(dtb_alias_node) | ||
334 | bb.warn(f"compatible: {compatible}, dtb_alias_id: {dtb_alias_id}, dtb_id: {dtb_id}, dtb_path: {dtb_path}") | ||
335 | |||
336 | def fitimage_emit_section_boot_script(self, bootscr_id, bootscr_path): | ||
337 | """Emit the fitImage ITS u-boot script section""" | ||
338 | if self._bootscr: | ||
339 | bb.fatal("U-boot script section already exists in the ITS file.") | ||
340 | bootscr_node = self.its_add_node_image( | ||
341 | bootscr_id, | ||
342 | "U-boot script", | ||
343 | "script", | ||
344 | "none", | ||
345 | { | ||
346 | "data": '/incbin/("' + bootscr_path + '")', | ||
347 | "arch": self._arch, | ||
348 | "type": "script" | ||
349 | } | ||
350 | ) | ||
351 | self._bootscr = bootscr_node | ||
352 | |||
353 | def fitimage_emit_section_setup(self, setup_id, setup_path): | ||
354 | """Emit the fitImage ITS setup section""" | ||
355 | if self._setup: | ||
356 | bb.fatal("Setup section already exists in the ITS file.") | ||
357 | load = "<0x00090000>" | ||
358 | entry = "<0x00090000>" | ||
359 | setup_node = self.its_add_node_image( | ||
360 | setup_id, | ||
361 | "Linux setup.bin", | ||
362 | "x86_setup", | ||
363 | "none", | ||
364 | { | ||
365 | "data": '/incbin/("' + setup_path + '")', | ||
366 | "arch": self._arch, | ||
367 | "os": "linux", | ||
368 | "load": load, | ||
369 | "entry": entry | ||
370 | } | ||
371 | ) | ||
372 | self._setup = setup_node | ||
373 | |||
374 | def fitimage_emit_section_ramdisk(self, ramdisk_id, ramdisk_path, description="ramdisk", load=None, entry=None): | ||
375 | """Emit the fitImage ITS ramdisk section""" | ||
376 | if self._ramdisk: | ||
377 | bb.fatal("Ramdisk section already exists in the ITS file.") | ||
378 | opt_props = { | ||
379 | "data": '/incbin/("' + ramdisk_path + '")', | ||
380 | "type": "ramdisk", | ||
381 | "arch": self._arch, | ||
382 | "os": "linux" | ||
383 | } | ||
384 | if load: | ||
385 | opt_props["load"] = f"<{load}>" | ||
386 | if entry: | ||
387 | opt_props["entry"] = f"<{entry}>" | ||
388 | |||
389 | ramdisk_node = self.its_add_node_image( | ||
390 | ramdisk_id, | ||
391 | description, | ||
392 | "ramdisk", | ||
393 | "none", | ||
394 | opt_props | ||
395 | ) | ||
396 | self._ramdisk = ramdisk_node | ||
397 | |||
398 | def _fitimage_emit_one_section_config(self, conf_node_name, dtb=None): | ||
399 | """Emit the fitImage ITS configuration section""" | ||
400 | opt_props = {} | ||
401 | conf_desc = [] | ||
402 | sign_entries = [] | ||
403 | |||
404 | if self._kernel: | ||
405 | conf_desc.append("Linux kernel") | ||
406 | opt_props["kernel"] = self._kernel.name | ||
407 | if self._sign_enable: | ||
408 | sign_entries.append("kernel") | ||
409 | |||
410 | if dtb: | ||
411 | conf_desc.append("FDT blob") | ||
412 | opt_props["fdt"] = dtb.name | ||
413 | if dtb.compatible: | ||
414 | opt_props["compatible"] = dtb.compatible | ||
415 | if self._sign_enable: | ||
416 | sign_entries.append("fdt") | ||
417 | |||
418 | if self._ramdisk: | ||
419 | conf_desc.append("ramdisk") | ||
420 | opt_props["ramdisk"] = self._ramdisk.name | ||
421 | if self._sign_enable: | ||
422 | sign_entries.append("ramdisk") | ||
423 | |||
424 | if self._bootscr: | ||
425 | conf_desc.append("u-boot script") | ||
426 | opt_props["bootscr"] = self._bootscr.name | ||
427 | if self._sign_enable: | ||
428 | sign_entries.append("bootscr") | ||
429 | |||
430 | if self._setup: | ||
431 | conf_desc.append("setup") | ||
432 | opt_props["setup"] = self._setup.name | ||
433 | if self._sign_enable: | ||
434 | sign_entries.append("setup") | ||
435 | |||
436 | # First added configuration is the default configuration | ||
437 | default_flag = "0" | ||
438 | if len(self.configurations.sub_nodes) == 0: | ||
439 | default_flag = "1" | ||
440 | |||
441 | conf_node = ItsNodeConfiguration( | ||
442 | conf_node_name, | ||
443 | self.configurations, | ||
444 | f"{default_flag} {', '.join(conf_desc)}", | ||
445 | opt_props=opt_props | ||
446 | ) | ||
447 | if self._hash_algo: | ||
448 | ItsNodeHash( | ||
449 | "hash-1", | ||
450 | conf_node, | ||
451 | self._hash_algo | ||
452 | ) | ||
453 | if self._sign_enable: | ||
454 | ItsNodeConfigurationSignature( | ||
455 | "signature-1", | ||
456 | conf_node, | ||
457 | f"{self._hash_algo},{self._sign_algo}", | ||
458 | self._sign_keyname_conf, | ||
459 | opt_props={ | ||
460 | "padding": self._pad_algo, | ||
461 | "sign-images": sign_entries | ||
462 | } | ||
463 | ) | ||
464 | |||
465 | def fitimage_emit_section_config(self, default_dtb_image=None): | ||
466 | if self._dtbs: | ||
467 | for dtb in self._dtbs: | ||
468 | dtb_name = dtb.name | ||
469 | if dtb.name.startswith("fdt-"): | ||
470 | dtb_name = dtb.name[len("fdt-"):] | ||
471 | self._fitimage_emit_one_section_config(self._conf_prefix + dtb_name, dtb) | ||
472 | for dtb in self._dtb_alias: | ||
473 | self._fitimage_emit_one_section_config(self._conf_prefix + dtb.alias_name, dtb) | ||
474 | else: | ||
475 | # Currently exactly one kernel is supported. | ||
476 | self._fitimage_emit_one_section_config(self._conf_prefix + "1") | ||
477 | |||
478 | default_conf = self.configurations.sub_nodes[0].name | ||
479 | if default_dtb_image and self._dtbs: | ||
480 | default_conf = self._conf_prefix + default_dtb_image | ||
481 | self.configurations.add_property('default', default_conf) | ||
482 | |||
483 | def run_mkimage_assemble(self, itsfile, fitfile): | ||
484 | cmd = [ | ||
485 | self._mkimage, | ||
486 | '-f', itsfile, | ||
487 | fitfile | ||
488 | ] | ||
489 | if self._mkimage_dtcopts: | ||
490 | cmd.insert(1, '-D') | ||
491 | cmd.insert(2, self._mkimage_dtcopts) | ||
492 | try: | ||
493 | subprocess.run(cmd, check=True, capture_output=True) | ||
494 | except subprocess.CalledProcessError as e: | ||
495 | bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}\nitsfile: {os.path.abspath(itsfile)}") | ||
496 | |||
497 | def run_mkimage_sign(self, fitfile): | ||
498 | if not self._sign_enable: | ||
499 | bb.debug(1, "FIT image signing is disabled. Skipping signing.") | ||
500 | return | ||
501 | |||
502 | # Some sanity checks because mkimage exits with 0 even when needed keys are missing | ||
503 | sign_key_path = os.path.join(self._sign_keydir, self._sign_keyname_conf) | ||
504 | if not os.path.exists(sign_key_path + '.key') or not os.path.exists(sign_key_path + '.crt'): | ||
505 | bb.fatal("%s.key or .crt does not exist" % sign_key_path) | ||
506 | if self._sign_individual: | ||
507 | sign_key_img_path = os.path.join(self._sign_keydir, self._sign_keyname_img) | ||
508 | if not os.path.exists(sign_key_img_path + '.key') or not os.path.exists(sign_key_img_path + '.crt'): | ||
509 | bb.fatal("%s.key or .crt does not exist" % sign_key_img_path) | ||
510 | |||
511 | cmd = [ | ||
512 | self._mkimage_sign, | ||
513 | '-F', | ||
514 | '-k', self._sign_keydir, | ||
515 | '-r', fitfile | ||
516 | ] | ||
517 | if self._mkimage_dtcopts: | ||
518 | cmd.extend(['-D', self._mkimage_dtcopts]) | ||
519 | if self._mkimage_sign_args: | ||
520 | cmd.extend(shlex.split(self._mkimage_sign_args)) | ||
521 | try: | ||
522 | subprocess.run(cmd, check=True, capture_output=True) | ||
523 | except subprocess.CalledProcessError as e: | ||
524 | bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}") | ||
525 | |||
526 | |||
527 | def symlink_points_below(file_or_symlink, expected_parent_dir): | ||
528 | """returns symlink destination if it points below directory""" | ||
529 | file_path = os.path.join(expected_parent_dir, file_or_symlink) | ||
530 | if not os.path.islink(file_path): | ||
531 | return None | ||
532 | |||
533 | realpath = os.path.relpath(os.path.realpath(file_path), expected_parent_dir) | ||
534 | if realpath.startswith(".."): | ||
535 | return None | ||
536 | |||
537 | return realpath | ||
538 | |||
539 | def get_compatible_from_dtb(dtb_path, fdtget_path="fdtget"): | ||
540 | compatible = None | ||
541 | cmd = [fdtget_path, "-t", "s", dtb_path, "/", "compatible"] | ||
542 | try: | ||
543 | ret = subprocess.run(cmd, check=True, capture_output=True, text=True) | ||
544 | compatible = ret.stdout.strip().split() | ||
545 | except subprocess.CalledProcessError: | ||
546 | compatible = None | ||
547 | return compatible | ||
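Taken together, the class is meant to be driven in a fixed sequence: construct the root, add image sections, emit the configurations, write the ITS file, then assemble (and optionally sign) with mkimage. A hedged sketch of that flow; the paths, load addresses and mkimage binary name are placeholders:

    root = ItsNodeRootKernel("Kernel fitImage", "1",
                             "aarch64-poky-linux-", "arm64", "conf-",
                             sign_enable=False, mkimage="uboot-mkimage")
    root.fitimage_emit_section_kernel("kernel-1", "linux.bin", "gzip",
                                      "0x80080000", "0x80080000", "kernel")
    root.fitimage_emit_section_dtb("mydevice.dtb", "mydevice.dtb")
    root.fitimage_emit_section_config()  # first configuration becomes the default
    root.write_its_file("fit-image.its")
    root.run_mkimage_assemble("fit-image.its", "fitImage")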
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py index dfd957d157..4559dc63b2 100644 --- a/meta/lib/oe/go.py +++ b/meta/lib/oe/go.py | |||
@@ -7,6 +7,10 @@ | |||
7 | import re | 7 | import re |
8 | 8 | ||
9 | def map_arch(a): | 9 | def map_arch(a): |
10 | """ | ||
11 | Map our architecture names to Go's GOARCH names. | ||
12 | See https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go for the complete list. | ||
13 | """ | ||
10 | if re.match('i.86', a): | 14 | if re.match('i.86', a): |
11 | return '386' | 15 | return '386' |
12 | elif a == 'x86_64': | 16 | elif a == 'x86_64': |
@@ -31,4 +35,4 @@ def map_arch(a): | |||
31 | return 'riscv64' | 35 | return 'riscv64' |
32 | elif a == 'loongarch64': | 36 | elif a == 'loongarch64': |
33 | return 'loong64' | 37 | return 'loong64' |
34 | return '' | 38 | raise KeyError(f"Cannot map architecture {a}") |
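Note the behavioural change in the hunk above: map_arch() used to return an empty string for unknown architectures and now raises KeyError. A sketch of how a caller that relied on the old fallback might adapt, assuming the usual metadata context where a datastore d is available:

    try:
        goarch = oe.go.map_arch(d.getVar("TARGET_ARCH"))
    except KeyError as e:
        bb.fatal(str(e))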
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py index d9c8d94da4..6e55fa1e7f 100644 --- a/meta/lib/oe/license.py +++ b/meta/lib/oe/license.py | |||
@@ -7,6 +7,7 @@ | |||
7 | 7 | ||
8 | import ast | 8 | import ast |
9 | import re | 9 | import re |
10 | import oe.qa | ||
10 | from fnmatch import fnmatchcase as fnmatch | 11 | from fnmatch import fnmatchcase as fnmatch |
11 | 12 | ||
12 | def license_ok(license, dont_want_licenses): | 13 | def license_ok(license, dont_want_licenses): |
@@ -259,3 +260,220 @@ def apply_pkg_license_exception(pkg, bad_licenses, exceptions): | |||
259 | """Return remaining bad licenses after removing any package exceptions""" | 260 | """Return remaining bad licenses after removing any package exceptions""" |
260 | 261 | ||
261 | return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions] | 262 | return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions] |
263 | |||
264 | def return_spdx(d, license): | ||
265 | """ | ||
266 | This function returns the SPDX mapping of a license if it exists, or None. | ||
267 | """ | ||
268 | return d.getVarFlag('SPDXLICENSEMAP', license) | ||
269 | |||
270 | def canonical_license(d, license): | ||
271 | """ | ||
272 | Return the canonical (SPDX) form of the license if available (so GPLv3 | ||
273 | becomes GPL-3.0-only) or the passed license if there is no canonical form. | ||
274 | """ | ||
275 | return d.getVarFlag('SPDXLICENSEMAP', license) or license | ||
276 | |||
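Both helpers are driven by the SPDXLICENSEMAP varflags set in the metadata; they differ only in the fallback. Illustrative values, assuming the usual mapping is configured:

    return_spdx(d, "GPLv3")              # -> "GPL-3.0-only"
    return_spdx(d, "GPL-3.0-only")       # -> None (no mapping defined)
    canonical_license(d, "GPLv3")        # -> "GPL-3.0-only"
    canonical_license(d, "GPL-3.0-only") # -> "GPL-3.0-only" (passed through)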
277 | def expand_wildcard_licenses(d, wildcard_licenses): | ||
278 | """ | ||
279 | There are some common wildcard values users may want to use. Support them | ||
280 | here. | ||
281 | """ | ||
282 | licenses = set(wildcard_licenses) | ||
283 | mapping = { | ||
284 | "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"], | ||
285 | "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"], | ||
286 | "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"], | ||
287 | } | ||
288 | for k in mapping: | ||
289 | if k in wildcard_licenses: | ||
290 | licenses.remove(k) | ||
291 | for item in mapping[k]: | ||
292 | licenses.add(item) | ||
293 | |||
294 | for l in licenses: | ||
295 | if l in obsolete_license_list(): | ||
296 | bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l) | ||
297 | if "*" in l: | ||
298 | bb.fatal("Error, %s is an invalid license wildcard entry" % l) | ||
299 | |||
300 | return list(licenses) | ||
301 | |||
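For example, a wildcard entry expands to both SPDX variants from the mapping above (result order is unspecified, since a set is used internally):

    expand_wildcard_licenses(d, ["GPL-3.0*", "MIT"])
    # -> ["GPL-3.0-only", "GPL-3.0-or-later", "MIT"]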
302 | def incompatible_license_contains(license, truevalue, falsevalue, d): | ||
303 | license = canonical_license(d, license) | ||
304 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
305 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
306 | return truevalue if license in bad_licenses else falsevalue | ||
307 | |||
308 | def incompatible_pkg_license(d, dont_want_licenses, license): | ||
309 | # Handles an "or" of two license sets provided by | ||
310 | # flattened_licenses(); pick one that works if possible. | ||
311 | def choose_lic_set(a, b): | ||
312 | return a if all(license_ok(canonical_license(d, lic), | ||
313 | dont_want_licenses) for lic in a) else b | ||
314 | |||
315 | try: | ||
316 | licenses = flattened_licenses(license, choose_lic_set) | ||
317 | except LicenseError as exc: | ||
318 | bb.fatal('%s: %s' % (d.getVar('P'), exc)) | ||
319 | |||
320 | incompatible_lic = [] | ||
321 | for l in licenses: | ||
322 | license = canonical_license(d, l) | ||
323 | if not license_ok(license, dont_want_licenses): | ||
324 | incompatible_lic.append(license) | ||
325 | |||
326 | return sorted(incompatible_lic) | ||
327 | |||
328 | def incompatible_license(d, dont_want_licenses, package=None): | ||
329 | """ | ||
330 | This function checks if a recipe has only incompatible licenses. It also | ||
331 | takes the 'or' operand into consideration. dont_want_licenses should be passed | ||
332 | as canonical (SPDX) names. | ||
333 | """ | ||
334 | license = d.getVar("LICENSE:%s" % package) if package else None | ||
335 | if not license: | ||
336 | license = d.getVar('LICENSE') | ||
337 | |||
338 | return incompatible_pkg_license(d, dont_want_licenses, license) | ||
339 | |||
340 | def check_license_flags(d): | ||
341 | """ | ||
342 | This function checks if a recipe has any LICENSE_FLAGS that | ||
343 | aren't acceptable. | ||
344 | |||
345 | If it does, it returns all the LICENSE_FLAGS missing from the list | ||
346 | of acceptable license flags, or all of the LICENSE_FLAGS if there | ||
347 | is no list of acceptable flags. | ||
348 | |||
349 | If everything is acceptable, it returns None. | ||
350 | """ | ||
351 | |||
352 | def license_flag_matches(flag, acceptlist, pn): | ||
353 | """ | ||
354 | Return True if flag matches something in acceptlist, False if not. | ||
355 | |||
356 | Before we test a flag against the acceptlist, we append _${PN} | ||
357 | to it. We then try to match that string against the | ||
358 | acceptlist. This covers the normal case, where we expect | ||
359 | LICENSE_FLAGS to be a simple string like 'commercial', which | ||
360 | the user typically matches exactly in the acceptlist by | ||
361 | explicitly appending the package name, e.g. 'commercial_foo'. | ||
362 | If we fail the match however, we then split the flag across | ||
363 | '_' and append each fragment and test until we either match or | ||
364 | run out of fragments. | ||
365 | """ | ||
366 | flag_pn = ("%s_%s" % (flag, pn)) | ||
367 | for candidate in acceptlist: | ||
368 | if flag_pn == candidate: | ||
369 | return True | ||
370 | |||
371 | flag_cur = "" | ||
372 | flagments = flag_pn.split("_") | ||
373 | flagments.pop() # we've already tested the full string | ||
374 | for flagment in flagments: | ||
375 | if flag_cur: | ||
376 | flag_cur += "_" | ||
377 | flag_cur += flagment | ||
378 | for candidate in acceptlist: | ||
379 | if flag_cur == candidate: | ||
380 | return True | ||
381 | return False | ||
382 | |||
383 | def all_license_flags_match(license_flags, acceptlist): | ||
384 | """ Return all unmatched flags, None if all flags match """ | ||
385 | pn = d.getVar('PN') | ||
386 | split_acceptlist = acceptlist.split() | ||
387 | flags = [] | ||
388 | for flag in license_flags.split(): | ||
389 | if not license_flag_matches(flag, split_acceptlist, pn): | ||
390 | flags.append(flag) | ||
391 | return flags if flags else None | ||
392 | |||
393 | license_flags = d.getVar('LICENSE_FLAGS') | ||
394 | if license_flags: | ||
395 | acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED') | ||
396 | if not acceptlist: | ||
397 | return license_flags.split() | ||
398 | unmatched_flags = all_license_flags_match(license_flags, acceptlist) | ||
399 | if unmatched_flags: | ||
400 | return unmatched_flags | ||
401 | return None | ||
402 | |||
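A worked example of the fragment matching described in the docstring above, for a hypothetical recipe:

    # For PN = "foo" and LICENSE_FLAGS = "commercial_binary", the strings
    # tested against LICENSE_FLAGS_ACCEPTED are, in order:
    #   "commercial_binary_foo"   (the full flag with _${PN} appended)
    #   "commercial"              (first fragment)
    #   "commercial_binary"       (first two fragments)
    # so an accept list containing just "commercial" accepts this flag.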
403 | def check_license_format(d): | ||
404 | """ | ||
405 | This function checks if LICENSE is well defined: it validates the | ||
406 | operators between license names and reports license names that are | ||
407 | not separated by a valid operator. | ||
408 | """ | ||
409 | pn = d.getVar('PN') | ||
410 | licenses = d.getVar('LICENSE') | ||
411 | |||
412 | elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) | ||
413 | for pos, element in enumerate(elements): | ||
414 | if license_pattern.match(element): | ||
415 | if pos > 0 and license_pattern.match(elements[pos - 1]): | ||
416 | oe.qa.handle_error('license-format', | ||
417 | '%s: LICENSE value "%s" has an invalid format - license names ' \ | ||
418 | 'must be separated by the following characters to indicate ' \ | ||
419 | 'the license selection: %s' % | ||
420 | (pn, licenses, license_operator_chars), d) | ||
421 | elif not license_operator.match(element): | ||
422 | oe.qa.handle_error('license-format', | ||
423 | '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \ | ||
424 | 'in the valid list of separators (%s)' % | ||
425 | (pn, licenses, element, license_operator_chars), d) | ||
426 | |||
427 | def skip_incompatible_package_licenses(d, pkgs): | ||
428 | if not pkgs: | ||
429 | return {} | ||
430 | |||
431 | pn = d.getVar("PN") | ||
432 | |||
433 | check_license = False if pn.startswith("nativesdk-") else True | ||
434 | for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", | ||
435 | "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}", | ||
436 | "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]: | ||
437 | if pn.endswith(d.expand(t)): | ||
438 | check_license = False | ||
439 | if pn.startswith("gcc-source-"): | ||
440 | check_license = False | ||
441 | |||
442 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
443 | if not check_license or not bad_licenses: | ||
444 | return {} | ||
445 | |||
446 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
447 | |||
448 | exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split() | ||
449 | |||
450 | for lic_exception in exceptions: | ||
451 | if ":" in lic_exception: | ||
452 | lic_exception = lic_exception.split(":")[1] | ||
453 | if lic_exception in obsolete_license_list(): | ||
454 | bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception) | ||
455 | |||
456 | skipped_pkgs = {} | ||
457 | for pkg in pkgs: | ||
458 | remaining_bad_licenses = apply_pkg_license_exception(pkg, bad_licenses, exceptions) | ||
459 | |||
460 | incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg) | ||
461 | if incompatible_lic: | ||
462 | skipped_pkgs[pkg] = incompatible_lic | ||
463 | |||
464 | return skipped_pkgs | ||
465 | |||
466 | def tidy_licenses(value): | ||
467 | """ | ||
468 | Flatten, split and sort licenses. | ||
469 | """ | ||
470 | from oe.license import flattened_licenses | ||
471 | |||
472 | def _choose(a, b): | ||
473 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
474 | return ["(%s | %s)" % (str_a, str_b)] | ||
475 | |||
476 | if not isinstance(value, str): | ||
477 | value = " & ".join(value) | ||
478 | |||
479 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
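For instance, duplicates are folded and the result is sorted case-insensitively:

    tidy_licenses("MIT & BSD-3-Clause & MIT")  # -> ['BSD-3-Clause', 'MIT']
    tidy_licenses(["MIT", "GPL-2.0-only"])     # -> ['GPL-2.0-only', 'MIT']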
diff --git a/meta/lib/oe/license_finder.py b/meta/lib/oe/license_finder.py new file mode 100644 index 0000000000..16f5d7c94c --- /dev/null +++ b/meta/lib/oe/license_finder.py | |||
@@ -0,0 +1,179 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import fnmatch | ||
8 | import hashlib | ||
9 | import logging | ||
10 | import os | ||
11 | import re | ||
12 | |||
13 | import bb | ||
14 | import bb.utils | ||
15 | |||
16 | logger = logging.getLogger("BitBake.OE.LicenseFinder") | ||
17 | |||
18 | def _load_hash_csv(d): | ||
19 | """ | ||
20 | Load a mapping of (checksum: license name) from all files/license-hashes.csv | ||
21 | files that can be found in the available layers. | ||
22 | """ | ||
23 | import csv | ||
24 | md5sums = {} | ||
25 | |||
26 | # Read license md5sums from csv file | ||
27 | for path in d.getVar('BBPATH').split(':'): | ||
28 | csv_path = os.path.join(path, 'files', 'license-hashes.csv') | ||
29 | if os.path.isfile(csv_path): | ||
30 | with open(csv_path, newline='') as csv_file: | ||
31 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=['md5sum', 'license']) | ||
32 | for row in reader: | ||
33 | md5sums[row['md5sum']] = row['license'] | ||
34 | |||
35 | return md5sums | ||
36 | |||
37 | |||
38 | def _crunch_known_licenses(d): | ||
39 | """ | ||
40 | Calculate the MD5 checksums for the original and "crunched" versions of all | ||
41 | known licenses. | ||
42 | """ | ||
43 | md5sums = {} | ||
44 | |||
45 | lic_dirs = [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or "").split() | ||
46 | for lic_dir in lic_dirs: | ||
47 | for fn in os.listdir(lic_dir): | ||
48 | path = os.path.join(lic_dir, fn) | ||
49 | # Hash the exact contents | ||
50 | md5value = bb.utils.md5_file(path) | ||
51 | md5sums[md5value] = fn | ||
52 | # Also hash a "crunched" version | ||
53 | md5value = _crunch_license(path) | ||
54 | md5sums[md5value] = fn | ||
55 | |||
56 | return md5sums | ||
57 | |||
58 | |||
59 | def _crunch_license(licfile): | ||
60 | ''' | ||
61 | Remove non-material text from a license file and then calculate its | ||
62 | md5sum. This works well for licenses that contain a copyright statement, | ||
63 | but is also a useful way to handle people's insistence upon reformatting | ||
64 | the license text slightly (with no material difference to the text of the | ||
65 | license). | ||
66 | ''' | ||
67 | |||
68 | import oe.utils | ||
69 | |||
70 | # Note: these are carefully constructed! | ||
71 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
72 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
73 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
74 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
75 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
76 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
77 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
78 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
79 | |||
80 | lictext = [] | ||
81 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
82 | for line in f: | ||
83 | # Drop opening statements | ||
84 | if copyright_re.match(line): | ||
85 | continue | ||
86 | elif disclaimer_re.match(line): | ||
87 | continue | ||
88 | elif email_re.match(line): | ||
89 | continue | ||
90 | elif header_re.match(line): | ||
91 | continue | ||
92 | elif tag_re.match(line): | ||
93 | continue | ||
94 | elif url_re.match(line): | ||
95 | continue | ||
96 | elif license_title_re.match(line): | ||
97 | continue | ||
98 | elif license_statement_re.match(line): | ||
99 | continue | ||
100 | # Strip comment symbols | ||
101 | line = line.replace('*', '') \ | ||
102 | .replace('#', '') | ||
103 | # Unify spelling | ||
104 | line = line.replace('sub-license', 'sublicense') | ||
105 | # Squash spaces | ||
106 | line = oe.utils.squashspaces(line.strip()) | ||
107 | # Replace smart quotes, double quotes and backticks with single quotes | ||
108 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
109 | # Unify brackets | ||
110 | line = line.replace("{", "[").replace("}", "]") | ||
111 | if line: | ||
112 | lictext.append(line) | ||
113 | |||
114 | m = hashlib.md5() | ||
115 | try: | ||
116 | m.update(' '.join(lictext).encode('utf-8')) | ||
117 | md5val = m.hexdigest() | ||
118 | except UnicodeEncodeError: | ||
119 | md5val = None | ||
120 | return md5val | ||
121 | |||
122 | |||
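The practical effect is that trivially reformatted copies of the same license text hash identically, so a single checksum entry covers all of them. A sketch, with hypothetical paths that hold the same MIT text modulo comment markers, quoting and whitespace:

    assert _crunch_license("a/LICENSE") == _crunch_license("b/COPYING")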
123 | def find_license_files(srctree, first_only=False): | ||
124 | """ | ||
125 | Search srctree for files that look like they could be licenses. | ||
126 | If first_only is True, only return the first file found. | ||
127 | """ | ||
128 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
129 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go", ".sh") | ||
130 | licfiles = [] | ||
131 | for root, dirs, files in os.walk(srctree): | ||
132 | # Sort files so that LICENSE is before LICENSE.subcomponent, which is | ||
133 | # meaningful if first_only is set. | ||
134 | for fn in sorted(files): | ||
135 | if fn.endswith(skip_extensions): | ||
136 | continue | ||
137 | for spec in licspecs: | ||
138 | if fnmatch.fnmatch(fn, spec): | ||
139 | fullpath = os.path.join(root, fn) | ||
140 | if not fullpath in licfiles: | ||
141 | licfiles.append(fullpath) | ||
142 | if first_only: | ||
143 | return licfiles | ||
144 | |||
145 | return licfiles | ||
146 | |||
147 | |||
148 | def match_licenses(licfiles, srctree, d, extra_hashes={}): | ||
149 | md5sums = {} | ||
150 | md5sums.update(_load_hash_csv(d)) | ||
151 | md5sums.update(_crunch_known_licenses(d)) | ||
152 | md5sums.update(extra_hashes) | ||
153 | |||
154 | licenses = [] | ||
155 | for licfile in sorted(licfiles): | ||
156 | resolved_licfile = d.expand(licfile) | ||
157 | md5value = bb.utils.md5_file(resolved_licfile) | ||
158 | license = md5sums.get(md5value, None) | ||
159 | if not license: | ||
160 | crunched_md5 = _crunch_license(resolved_licfile) | ||
161 | license = md5sums.get(crunched_md5, None) | ||
162 | if not license: | ||
163 | license = 'Unknown' | ||
164 | logger.info("Please add the following line for '%s' to a 'license-hashes.csv' " \ | ||
165 | "and replace `Unknown` with the license:\n" \ | ||
166 | "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value)) | ||
167 | |||
168 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
169 | |||
170 | return licenses | ||
171 | |||
172 | |||
173 | def find_licenses(srctree, d, first_only=False, extra_hashes={}): | ||
174 | licfiles = find_license_files(srctree, first_only) | ||
175 | licenses = match_licenses(licfiles, srctree, d, extra_hashes) | ||
176 | |||
177 | # FIXME should we grab at least one source file with a license header and add that too? | ||
178 | |||
179 | return licenses | ||
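Typical consumption from a recipe-creation tool, assuming a datastore d and a checked-out source tree path are in scope:

    for license, path, md5 in find_licenses(srctree, d):
        logger.info("detected %s in %s (md5 %s)", license, path, md5)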
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py index 61f18adc4a..cf7a13c247 100644 --- a/meta/lib/oe/manifest.py +++ b/meta/lib/oe/manifest.py | |||
@@ -200,7 +200,3 @@ def create_manifest(d, final_manifest=False, manifest_dir=None, | |||
200 | manifest.create_final() | 200 | manifest.create_final() |
201 | else: | 201 | else: |
202 | manifest.create_initial() | 202 | manifest.create_initial() |
203 | |||
204 | |||
205 | if __name__ == "__main__": | ||
206 | pass | ||
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index 1511ba47c4..ce69151e5d 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py | |||
@@ -14,10 +14,12 @@ import glob | |||
14 | import stat | 14 | import stat |
15 | import mmap | 15 | import mmap |
16 | import subprocess | 16 | import subprocess |
17 | import shutil | ||
17 | 18 | ||
19 | import bb.parse | ||
18 | import oe.cachedpath | 20 | import oe.cachedpath |
19 | 21 | ||
20 | def runstrip(arg): | 22 | def runstrip(file, elftype, strip, extra_strip_sections=''): |
21 | # Function to strip a single file, called from split_and_strip_files below | 23 | # Function to strip a single file, called from split_and_strip_files below |
22 | # A working 'file' (one which works on the target architecture) | 24 | # A working 'file' (one which works on the target architecture) |
23 | # | 25 | # |
@@ -27,12 +29,6 @@ def runstrip(arg): | |||
27 | # 8 - shared library | 29 | # 8 - shared library |
28 | # 16 - kernel module | 30 | # 16 - kernel module |
29 | 31 | ||
30 | if len(arg) == 3: | ||
31 | (file, elftype, strip) = arg | ||
32 | extra_strip_sections = '' | ||
33 | else: | ||
34 | (file, elftype, strip, extra_strip_sections) = arg | ||
35 | |||
36 | newmode = None | 32 | newmode = None |
37 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | 33 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): |
38 | origmode = os.stat(file)[stat.ST_MODE] | 34 | origmode = os.stat(file)[stat.ST_MODE] |
@@ -195,20 +191,33 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_alre | |||
195 | 191 | ||
196 | oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process) | 192 | oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process) |
197 | 193 | ||
194 | TRANSLATE = ( | ||
195 | ("@", "@at@"), | ||
196 | (" ", "@space@"), | ||
197 | ("\t", "@tab@"), | ||
198 | ("[", "@openbrace@"), | ||
199 | ("]", "@closebrace@"), | ||
200 | ("_", "@underscore@"), | ||
201 | (":", "@colon@"), | ||
202 | ) | ||
198 | 203 | ||
199 | def file_translate(file): | 204 | def file_translate(file): |
200 | ft = file.replace("@", "@at@") | 205 | ft = file |
201 | ft = ft.replace(" ", "@space@") | 206 | for s, replace in TRANSLATE: |
202 | ft = ft.replace("\t", "@tab@") | 207 | ft = ft.replace(s, replace) |
203 | ft = ft.replace("[", "@openbrace@") | 208 | |
204 | ft = ft.replace("]", "@closebrace@") | 209 | return ft |
205 | ft = ft.replace("_", "@underscore@") | 210 | |
211 | def file_reverse_translate(file): | ||
212 | ft = file | ||
213 | for s, replace in reversed(TRANSLATE): | ||
214 | ft = ft.replace(replace, s) | ||
215 | |||
206 | return ft | 216 | return ft |
207 | 217 | ||
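Because the replacements run in reverse order on the way back, the two functions round-trip cleanly for any path that does not already contain the @...@ escape sequences:

    name = "/usr/lib/my pkg/[data]_v1"
    assert file_reverse_translate(file_translate(name)) == name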
208 | def filedeprunner(arg): | 218 | def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest): |
209 | import re, subprocess, shlex | 219 | import re, subprocess, shlex |
210 | 220 | ||
211 | (pkg, pkgfiles, rpmdeps, pkgdest) = arg | ||
212 | provides = {} | 221 | provides = {} |
213 | requires = {} | 222 | requires = {} |
214 | 223 | ||
@@ -648,6 +657,8 @@ def split_locales(d): | |||
648 | except ValueError: | 657 | except ValueError: |
649 | locale_index = len(packages) | 658 | locale_index = len(packages) |
650 | 659 | ||
660 | lic = d.getVar("LICENSE:" + pn + "-locale") | ||
661 | |||
651 | localepaths = [] | 662 | localepaths = [] |
652 | locales = set() | 663 | locales = set() |
653 | for localepath in (d.getVar('LOCALE_PATHS') or "").split(): | 664 | for localepath in (d.getVar('LOCALE_PATHS') or "").split(): |
@@ -683,6 +694,8 @@ def split_locales(d): | |||
683 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | 694 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) |
684 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) | 695 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) |
685 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | 696 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) |
697 | if lic: | ||
698 | d.setVar('LICENSE:' + pkg, lic) | ||
686 | if locale_section: | 699 | if locale_section: |
687 | d.setVar('SECTION:' + pkg, locale_section) | 700 | d.setVar('SECTION:' + pkg, locale_section) |
688 | 701 | ||
@@ -979,7 +992,7 @@ def copydebugsources(debugsrcdir, sources, d): | |||
979 | 992 | ||
980 | prefixmap = {} | 993 | prefixmap = {} |
981 | for flag in cflags.split(): | 994 | for flag in cflags.split(): |
982 | if not flag.startswith("-fdebug-prefix-map"): | 995 | if not flag.startswith("-ffile-prefix-map"): |
983 | continue | 996 | continue |
984 | if "recipe-sysroot" in flag: | 997 | if "recipe-sysroot" in flag: |
985 | continue | 998 | continue |
@@ -1037,6 +1050,51 @@ def copydebugsources(debugsrcdir, sources, d): | |||
1037 | if os.path.exists(p) and not os.listdir(p): | 1050 | if os.path.exists(p) and not os.listdir(p): |
1038 | os.rmdir(p) | 1051 | os.rmdir(p) |
1039 | 1052 | ||
1053 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
1054 | def save_debugsources_info(debugsrcdir, sources_raw, d): | ||
1055 | import json | ||
1056 | import bb.compress.zstd | ||
1057 | if debugsrcdir and sources_raw: | ||
1058 | debugsources_file = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd") | ||
1059 | debugsources_dir = os.path.dirname(debugsources_file) | ||
1060 | if not os.path.isdir(debugsources_dir): | ||
1061 | bb.utils.mkdirhier(debugsources_dir) | ||
1062 | bb.utils.remove(debugsources_file) | ||
1063 | |||
1064 | workdir = d.getVar("WORKDIR") | ||
1065 | pn = d.getVar('PN') | ||
1066 | |||
1067 | # Kernel sources live in a different directory and are a special case: | ||
1068 | # we format the sources as expected by SPDX by replacing /usr/src/kernel/ | ||
1069 | # with BP/ | ||
1070 | kernel_src = d.getVar('KERNEL_SRC_PATH') | ||
1071 | bp = d.getVar('BP') | ||
1072 | sources_dict = {} | ||
1073 | for file, src_files in sources_raw: | ||
1074 | file_clean = file.replace(f"{workdir}/package/","") | ||
1075 | sources_clean = [ | ||
1076 | src.replace(f"{debugsrcdir}/{pn}/", "") | ||
1077 | if not kernel_src else src.replace(f"{kernel_src}/", f"{bp}/") | ||
1078 | for src in src_files | ||
1079 | if not any(keyword in src for keyword in ("<internal>", "<built-in>")) and not src.endswith("/") | ||
1080 | ] | ||
1081 | sources_dict[file_clean] = sorted(sources_clean) | ||
1082 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
1083 | with bb.compress.zstd.open(debugsources_file, "wt", encoding="utf-8", num_threads=num_threads) as f: | ||
1084 | json.dump(sources_dict, f, sort_keys=True) | ||
1085 | |||
1086 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
1087 | def read_debugsources_info(d): | ||
1088 | import json | ||
1089 | import bb.compress.zstd | ||
1090 | try: | ||
1091 | fn = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd") | ||
1092 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
1093 | with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f: | ||
1094 | return json.load(f) | ||
1095 | except FileNotFoundError: | ||
1096 | bb.debug(1, f"File not found: {fn}") | ||
1097 | return None | ||
1040 | 1098 | ||
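The two functions above form a write/read pair across tasks: packaging writes the binary-to-sources mapping and later tasks (SPDX generation, for instance) read it back. A sketch of the consuming side:

    sources = read_debugsources_info(d)
    if sources:
        for binary, src_files in sources.items():
            bb.debug(1, "%s built from %d source files" % (binary, len(src_files)))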
1041 | def process_split_and_strip_files(d): | 1099 | def process_split_and_strip_files(d): |
1042 | cpath = oe.cachedpath.CachedPath() | 1100 | cpath = oe.cachedpath.CachedPath() |
@@ -1064,6 +1122,7 @@ def process_split_and_strip_files(d): | |||
1064 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | 1122 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): |
1065 | checkelf = {} | 1123 | checkelf = {} |
1066 | checkelflinks = {} | 1124 | checkelflinks = {} |
1125 | checkstatic = {} | ||
1067 | for root, dirs, files in cpath.walk(dvar): | 1126 | for root, dirs, files in cpath.walk(dvar): |
1068 | for f in files: | 1127 | for f in files: |
1069 | file = os.path.join(root, f) | 1128 | file = os.path.join(root, f) |
@@ -1077,10 +1136,6 @@ def process_split_and_strip_files(d): | |||
1077 | if file in skipfiles: | 1136 | if file in skipfiles: |
1078 | continue | 1137 | continue |
1079 | 1138 | ||
1080 | if oe.package.is_static_lib(file): | ||
1081 | staticlibs.append(file) | ||
1082 | continue | ||
1083 | |||
1084 | try: | 1139 | try: |
1085 | ltarget = cpath.realpath(file, dvar, False) | 1140 | ltarget = cpath.realpath(file, dvar, False) |
1086 | s = cpath.lstat(ltarget) | 1141 | s = cpath.lstat(ltarget) |
@@ -1092,6 +1147,13 @@ def process_split_and_strip_files(d): | |||
1092 | continue | 1147 | continue |
1093 | if not s: | 1148 | if not s: |
1094 | continue | 1149 | continue |
1150 | |||
1151 | if oe.package.is_static_lib(file): | ||
1152 | # Use a reference of device ID and inode number to identify files | ||
1153 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
1154 | checkstatic[file] = (file, file_reference) | ||
1155 | continue | ||
1156 | |||
1095 | # Check its an executable | 1157 | # Check its an executable |
1096 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ | 1158 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ |
1097 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ | 1159 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ |
@@ -1156,6 +1218,27 @@ def process_split_and_strip_files(d): | |||
1156 | # Modified the file so clear the cache | 1218 | # Modified the file so clear the cache |
1157 | cpath.updatecache(file) | 1219 | cpath.updatecache(file) |
1158 | 1220 | ||
1221 | # Do the same hardlink processing as above, but for static libraries | ||
1222 | results = list(checkstatic.keys()) | ||
1223 | |||
1224 | # As above, sort the results. | ||
1225 | results.sort() | ||
1226 | |||
1227 | for file in results: | ||
1228 | # Use a reference of device ID and inode number to identify files | ||
1229 | file_reference = checkstatic[file][1] | ||
1230 | if file_reference in inodes: | ||
1231 | os.unlink(file) | ||
1232 | os.link(inodes[file_reference][0], file) | ||
1233 | inodes[file_reference].append(file) | ||
1234 | else: | ||
1235 | inodes[file_reference] = [file] | ||
1236 | # break hardlink | ||
1237 | bb.utils.break_hardlinks(file) | ||
1238 | staticlibs.append(file) | ||
1239 | # Modified the file so clear the cache | ||
1240 | cpath.updatecache(file) | ||
1241 | |||
1159 | def strip_pkgd_prefix(f): | 1242 | def strip_pkgd_prefix(f): |
1160 | nonlocal dvar | 1243 | nonlocal dvar |
1161 | 1244 | ||
@@ -1194,11 +1277,24 @@ def process_split_and_strip_files(d): | |||
1194 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | 1277 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] |
1195 | fpath = dvar + dest | 1278 | fpath = dvar + dest |
1196 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | 1279 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] |
1197 | bb.utils.mkdirhier(os.path.dirname(fpath)) | 1280 | if os.access(ftarget, os.R_OK): |
1198 | # Only one hardlink of separated debug info file in each directory | 1281 | bb.utils.mkdirhier(os.path.dirname(fpath)) |
1199 | if not os.access(fpath, os.R_OK): | 1282 | # Only one hardlink of separated debug info file in each directory |
1200 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | 1283 | if not os.access(fpath, os.R_OK): |
1201 | os.link(ftarget, fpath) | 1284 | #bb.note("Link %s -> %s" % (fpath, ftarget)) |
1285 | os.link(ftarget, fpath) | ||
1286 | elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
1287 | deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"] | ||
1288 | fpath = dvar + deststatic | ||
1289 | ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"] | ||
1290 | if os.access(ftarget, os.R_OK): | ||
1291 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
1292 | # Only one hardlink of separated debug info file in each directory | ||
1293 | if not os.access(fpath, os.R_OK): | ||
1294 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
1295 | os.link(ftarget, fpath) | ||
1296 | else: | ||
1297 | bb.note("Unable to find inode link target %s" % (target)) | ||
1202 | 1298 | ||
1203 | # Create symlinks for all cases we were able to split symbols | 1299 | # Create symlinks for all cases we were able to split symbols |
1204 | for file in symlinks: | 1300 | for file in symlinks: |
@@ -1230,6 +1326,9 @@ def process_split_and_strip_files(d): | |||
1230 | # Process the dv["srcdir"] if requested... | 1326 | # Process the dv["srcdir"] if requested... |
1231 | # This copies and places the referenced sources for later debugging... | 1327 | # This copies and places the referenced sources for later debugging... |
1232 | copydebugsources(dv["srcdir"], sources, d) | 1328 | copydebugsources(dv["srcdir"], sources, d) |
1329 | |||
1330 | # Save source info to be accessible to other tasks | ||
1331 | save_debugsources_info(dv["srcdir"], results, d) | ||
1233 | # | 1332 | # |
1234 | # End of debug splitting | 1333 | # End of debug splitting |
1235 | # | 1334 | # |
@@ -1394,10 +1493,10 @@ def populate_packages(d): | |||
1394 | 1493 | ||
1395 | # Handle excluding packages with incompatible licenses | 1494 | # Handle excluding packages with incompatible licenses |
1396 | package_list = [] | 1495 | package_list = [] |
1496 | skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages) | ||
1397 | for pkg in packages: | 1497 | for pkg in packages: |
1398 | licenses = d.getVar('_exclude_incompatible-' + pkg) | 1498 | if pkg in skipped_pkgs: |
1399 | if licenses: | 1499 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg]) |
1400 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) | ||
1401 | oe.qa.handle_error("incompatible-license", msg, d) | 1500 | oe.qa.handle_error("incompatible-license", msg, d) |
1402 | else: | 1501 | else: |
1403 | package_list.append(pkg) | 1502 | package_list.append(pkg) |
@@ -1566,7 +1665,6 @@ def process_shlibs(pkgfiles, d): | |||
1566 | needs_ldconfig = False | 1665 | needs_ldconfig = False |
1567 | needed = set() | 1666 | needed = set() |
1568 | sonames = set() | 1667 | sonames = set() |
1569 | renames = [] | ||
1570 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | 1668 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') |
1571 | cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null" | 1669 | cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null" |
1572 | fd = os.popen(cmd) | 1670 | fd = os.popen(cmd) |
@@ -1594,11 +1692,9 @@ def process_shlibs(pkgfiles, d): | |||
1594 | sonames.add(prov) | 1692 | sonames.add(prov) |
1595 | if libdir_re.match(os.path.dirname(file)): | 1693 | if libdir_re.match(os.path.dirname(file)): |
1596 | needs_ldconfig = True | 1694 | needs_ldconfig = True |
1597 | if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname): | 1695 | return (needs_ldconfig, needed, sonames) |
1598 | renames.append((file, os.path.join(os.path.dirname(file), this_soname))) | ||
1599 | return (needs_ldconfig, needed, sonames, renames) | ||
1600 | 1696 | ||
1601 | def darwin_so(file, needed, sonames, renames, pkgver): | 1697 | def darwin_so(file, needed, sonames, pkgver): |
1602 | if not os.path.exists(file): | 1698 | if not os.path.exists(file): |
1603 | return | 1699 | return |
1604 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | 1700 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') |
@@ -1650,7 +1746,7 @@ def process_shlibs(pkgfiles, d): | |||
1650 | if name and name not in needed[pkg]: | 1746 | if name and name not in needed[pkg]: |
1651 | needed[pkg].add((name, file, tuple())) | 1747 | needed[pkg].add((name, file, tuple())) |
1652 | 1748 | ||
1653 | def mingw_dll(file, needed, sonames, renames, pkgver): | 1749 | def mingw_dll(file, needed, sonames, pkgver): |
1654 | if not os.path.exists(file): | 1750 | if not os.path.exists(file): |
1655 | return | 1751 | return |
1656 | 1752 | ||
@@ -1669,11 +1765,6 @@ def process_shlibs(pkgfiles, d): | |||
1669 | if dllname: | 1765 | if dllname: |
1670 | needed[pkg].add((dllname, file, tuple())) | 1766 | needed[pkg].add((dllname, file, tuple())) |
1671 | 1767 | ||
1672 | if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": | ||
1673 | snap_symlinks = True | ||
1674 | else: | ||
1675 | snap_symlinks = False | ||
1676 | |||
1677 | needed = {} | 1768 | needed = {} |
1678 | 1769 | ||
1679 | shlib_provider = oe.package.read_shlib_providers(d) | 1770 | shlib_provider = oe.package.read_shlib_providers(d) |
@@ -1692,16 +1783,15 @@ def process_shlibs(pkgfiles, d): | |||
1692 | 1783 | ||
1693 | needed[pkg] = set() | 1784 | needed[pkg] = set() |
1694 | sonames = set() | 1785 | sonames = set() |
1695 | renames = [] | ||
1696 | linuxlist = [] | 1786 | linuxlist = [] |
1697 | for file in pkgfiles[pkg]: | 1787 | for file in pkgfiles[pkg]: |
1698 | soname = None | 1788 | soname = None |
1699 | if cpath.islink(file): | 1789 | if cpath.islink(file): |
1700 | continue | 1790 | continue |
1701 | if hostos.startswith("darwin"): | 1791 | if hostos.startswith("darwin"): |
1702 | darwin_so(file, needed, sonames, renames, pkgver) | 1792 | darwin_so(file, needed, sonames, pkgver) |
1703 | elif hostos.startswith("mingw"): | 1793 | elif hostos.startswith("mingw"): |
1704 | mingw_dll(file, needed, sonames, renames, pkgver) | 1794 | mingw_dll(file, needed, sonames, pkgver) |
1705 | elif os.access(file, os.X_OK) or lib_re.match(file): | 1795 | elif os.access(file, os.X_OK) or lib_re.match(file): |
1706 | linuxlist.append(file) | 1796 | linuxlist.append(file) |
1707 | 1797 | ||
@@ -1711,14 +1801,8 @@ def process_shlibs(pkgfiles, d): | |||
1711 | ldconfig = r[0] | 1801 | ldconfig = r[0] |
1712 | needed[pkg] |= r[1] | 1802 | needed[pkg] |= r[1] |
1713 | sonames |= r[2] | 1803 | sonames |= r[2] |
1714 | renames.extend(r[3]) | ||
1715 | needs_ldconfig = needs_ldconfig or ldconfig | 1804 | needs_ldconfig = needs_ldconfig or ldconfig |
1716 | 1805 | ||
1717 | for (old, new) in renames: | ||
1718 | bb.note("Renaming %s to %s" % (old, new)) | ||
1719 | bb.utils.rename(old, new) | ||
1720 | pkgfiles[pkg].remove(old) | ||
1721 | |||
1722 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") | 1806 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") |
1723 | if len(sonames): | 1807 | if len(sonames): |
1724 | with open(shlibs_file, 'w') as fd: | 1808 | with open(shlibs_file, 'w') as fd: |
@@ -1839,7 +1923,7 @@ def process_pkgconfig(pkgfiles, d): | |||
1839 | if m: | 1923 | if m: |
1840 | hdr = m.group(1) | 1924 | hdr = m.group(1) |
1841 | exp = pd.expand(m.group(2)) | 1925 | exp = pd.expand(m.group(2)) |
1842 | if hdr == 'Requires': | 1926 | if hdr == 'Requires' or hdr == 'Requires.private': |
1843 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | 1927 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() |
1844 | continue | 1928 | continue |
1845 | m = var_re.match(l) | 1929 | m = var_re.match(l) |
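The hunk above widens the pkg-config scan so that Requires.private dependencies are tracked alongside Requires. A minimal standalone sketch of the same parsing approach; the field regex here is an assumed stand-in for the real field_re:

    import re

    field_re = re.compile(r'(^.*?):\s*(.*)')  # assumed equivalent of the real field regex

    pc_text = """Name: foo
    Requires: glib-2.0 >= 2.40, zlib
    Requires.private: libffi
    """

    pkgconfig_needed = []
    for l in pc_text.splitlines():
        m = field_re.match(l)
        if not m:
            continue
        hdr, exp = m.group(1), m.group(2)
        if hdr == 'Requires' or hdr == 'Requires.private':
            pkgconfig_needed += exp.replace(',', ' ').split()

    print(pkgconfig_needed)  # ['glib-2.0', '>=', '2.40', 'zlib', 'libffi']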
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py index d3b2317894..2100a97c12 100644 --- a/meta/lib/oe/package_manager/__init__.py +++ b/meta/lib/oe/package_manager/__init__.py | |||
@@ -365,45 +365,43 @@ class PackageManager(object, metaclass=ABCMeta): | |||
365 | for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): | 365 | for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): |
366 | globs += (" " + complementary_linguas) % lang | 366 | globs += (" " + complementary_linguas) % lang |
367 | 367 | ||
368 | if globs is None: | 368 | if globs: |
369 | return | 369 | # we need to write the list of installed packages to a file because the |
370 | 370 | # oe-pkgdata-util reads it from a file | |
371 | # we need to write the list of installed packages to a file because the | 371 | with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: |
372 | # oe-pkgdata-util reads it from a file | 372 | pkgs = self.list_installed() |
373 | with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: | 373 | |
374 | pkgs = self.list_installed() | 374 | provided_pkgs = set() |
375 | 375 | for pkg in pkgs.values(): | |
376 | provided_pkgs = set() | 376 | provided_pkgs |= set(pkg.get('provs', [])) |
377 | for pkg in pkgs.values(): | 377 | |
378 | provided_pkgs |= set(pkg.get('provs', [])) | 378 | output = oe.utils.format_pkg_list(pkgs, "arch") |
379 | 379 | installed_pkgs.write(output) | |
380 | output = oe.utils.format_pkg_list(pkgs, "arch") | 380 | installed_pkgs.flush() |
381 | installed_pkgs.write(output) | 381 | |
382 | installed_pkgs.flush() | 382 | cmd = ["oe-pkgdata-util", |
383 | 383 | "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, | |
384 | cmd = ["oe-pkgdata-util", | 384 | globs] |
385 | "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, | 385 | exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') |
386 | globs] | 386 | if exclude: |
387 | exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') | 387 | cmd.extend(['--exclude=' + '|'.join(exclude.split())]) |
388 | if exclude: | 388 | try: |
389 | cmd.extend(['--exclude=' + '|'.join(exclude.split())]) | 389 | bb.note('Running %s' % cmd) |
390 | try: | 390 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
391 | bb.note('Running %s' % cmd) | 391 | stdout, stderr = proc.communicate() |
392 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | 392 | if stderr: bb.note(stderr.decode("utf-8")) |
393 | stdout, stderr = proc.communicate() | 393 | complementary_pkgs = stdout.decode("utf-8") |
394 | if stderr: bb.note(stderr.decode("utf-8")) | 394 | complementary_pkgs = set(complementary_pkgs.split()) |
395 | complementary_pkgs = stdout.decode("utf-8") | 395 | skip_pkgs = sorted(complementary_pkgs & provided_pkgs) |
396 | complementary_pkgs = set(complementary_pkgs.split()) | 396 | install_pkgs = sorted(complementary_pkgs - provided_pkgs) |
397 | skip_pkgs = sorted(complementary_pkgs & provided_pkgs) | 397 | bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( |
398 | install_pkgs = sorted(complementary_pkgs - provided_pkgs) | 398 | ' '.join(install_pkgs), |
399 | bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( | 399 | ' '.join(skip_pkgs))) |
400 | ' '.join(install_pkgs), | 400 | self.install(install_pkgs, hard_depends_only=True) |
401 | ' '.join(skip_pkgs))) | 401 | except subprocess.CalledProcessError as e: |
402 | self.install(install_pkgs, hard_depends_only=True) | 402 | bb.fatal("Could not compute complementary packages list. Command " |
403 | except subprocess.CalledProcessError as e: | 403 | "'%s' returned %d:\n%s" % |
404 | bb.fatal("Could not compute complementary packages list. Command " | 404 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) |
405 | "'%s' returned %d:\n%s" % | ||
406 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
407 | 405 | ||
408 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': | 406 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': |
409 | target_arch = self.d.getVar('TARGET_ARCH') | 407 | target_arch = self.d.getVar('TARGET_ARCH') |
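The reworked install_complementary() above now skips complementary packages that are already provided by an installed package instead of reinstalling them. The set arithmetic in isolation (package names hypothetical):

    complementary_pkgs = {"foo-dev", "foo-dbg", "bar-dev"}
    provided_pkgs = {"bar-dev"}  # e.g. provided by an already-installed package

    skip_pkgs = sorted(complementary_pkgs & provided_pkgs)     # ['bar-dev']
    install_pkgs = sorted(complementary_pkgs - provided_pkgs)  # ['foo-dbg', 'foo-dev']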
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py new file mode 100644 index 0000000000..6a1e28ee6f --- /dev/null +++ b/meta/lib/oe/package_manager/common_deb_ipk.py | |||
@@ -0,0 +1,97 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import glob | ||
8 | import os | ||
9 | import subprocess | ||
10 | import tempfile | ||
11 | |||
12 | import bb | ||
13 | |||
14 | from oe.package_manager import opkg_query, PackageManager | ||
15 | |||
16 | class OpkgDpkgPM(PackageManager): | ||
17 | def __init__(self, d, target_rootfs): | ||
18 | """ | ||
19 | This is an abstract class. Do not instantiate this directly. | ||
20 | """ | ||
21 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
22 | |||
23 | def package_info(self, pkg): | ||
24 | """ | ||
25 | Returns a dictionary with the package info. | ||
26 | """ | ||
27 | raise NotImplementedError | ||
28 | |||
29 | def _common_package_info(self, cmd): | ||
30 | """ | ||
31 | "Returns a dictionary with the package info. | ||
32 | |||
33 | This method extracts the common parts for Opkg and Dpkg | ||
34 | """ | ||
35 | |||
36 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
37 | if proc.returncode: | ||
38 | bb.fatal("Unable to list available packages. Command '%s' " | ||
39 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
40 | elif proc.stderr: | ||
41 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
42 | |||
43 | return opkg_query(proc.stdout) | ||
44 | |||
45 | def extract(self, pkg): | ||
46 | """ | ||
47 | Returns the path to a tmpdir containing the contents of the package. | ||
48 | |||
49 | Deleting the tmpdir is the responsibility of the caller. | ||
50 | """ | ||
51 | pkg_info = self.package_info(pkg) | ||
52 | if not pkg_info: | ||
53 | bb.fatal("Unable to get information for package '%s' while " | ||
54 | "trying to extract the package." % pkg) | ||
55 | |||
56 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
57 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
58 | pkg_path = pkg_info[pkg]["filepath"] | ||
59 | |||
60 | if not os.path.isfile(pkg_path): | ||
61 | bb.fatal("Unable to extract package for '%s'." | ||
62 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
63 | |||
64 | tmp_dir = tempfile.mkdtemp() | ||
65 | current_dir = os.getcwd() | ||
66 | os.chdir(tmp_dir) | ||
67 | |||
68 | try: | ||
69 | cmd = [ar_cmd, 'x', pkg_path] | ||
70 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
71 | data_tar = glob.glob("data.tar.*") | ||
72 | if len(data_tar) != 1: | ||
73 | bb.fatal("Unable to extract %s package. Failed to identify " | ||
74 | "data tarball (found tarballs '%s').", | ||
75 | pkg_path, data_tar) | ||
76 | data_tar = data_tar[0] | ||
77 | cmd = [tar_cmd, 'xf', data_tar] | ||
78 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
79 | except subprocess.CalledProcessError as e: | ||
80 | bb.utils.remove(tmp_dir, recurse=True) | ||
81 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
82 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
83 | except OSError as e: | ||
84 | bb.utils.remove(tmp_dir, recurse=True) | ||
85 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
86 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
87 | |||
88 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
89 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
90 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
91 | bb.utils.remove(os.path.join(tmp_dir, data_tar)) | ||
92 | os.chdir(current_dir) | ||
93 | |||
94 | return tmp_dir | ||
95 | |||
96 | def _handle_intercept_failure(self, registered_pkgs): | ||
97 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
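A hedged usage sketch of the new shared class: OpkgDpkgPM is abstract, so extract() is reached through a concrete subclass such as DpkgPM or OpkgPM. The constructor arguments and package name below are illustrative, not exact.

    import shutil
    from oe.package_manager.ipk import OpkgPM

    pm = OpkgPM(d, target_rootfs, config_file, archs)  # 'd' is the BitBake datastore
    tmp_dir = pm.extract("busybox")  # unpacks the package's data.tar.* into a tmpdir
    try:
        pass  # inspect the extracted contents here
    finally:
        shutil.rmtree(tmp_dir)  # deleting the tmpdir is the caller's responsibility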
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py index 0c23c884c1..e09e81e490 100644 --- a/meta/lib/oe/package_manager/deb/__init__.py +++ b/meta/lib/oe/package_manager/deb/__init__.py | |||
@@ -7,6 +7,7 @@ | |||
7 | import re | 7 | import re |
8 | import subprocess | 8 | import subprocess |
9 | from oe.package_manager import * | 9 | from oe.package_manager import * |
10 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
10 | 11 | ||
11 | class DpkgIndexer(Indexer): | 12 | class DpkgIndexer(Indexer): |
12 | def _create_configs(self): | 13 | def _create_configs(self): |
@@ -111,72 +112,6 @@ class PMPkgsList(PkgsList): | |||
111 | 112 | ||
112 | return opkg_query(cmd_output) | 113 | return opkg_query(cmd_output) |
113 | 114 | ||
114 | class OpkgDpkgPM(PackageManager): | ||
115 | def __init__(self, d, target_rootfs): | ||
116 | """ | ||
117 | This is an abstract class. Do not instantiate this directly. | ||
118 | """ | ||
119 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
120 | |||
121 | def package_info(self, pkg, cmd): | ||
122 | """ | ||
123 | Returns a dictionary with the package info. | ||
124 | |||
125 | This method extracts the common parts for Opkg and Dpkg | ||
126 | """ | ||
127 | |||
128 | try: | ||
129 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") | ||
130 | except subprocess.CalledProcessError as e: | ||
131 | bb.fatal("Unable to list available packages. Command '%s' " | ||
132 | "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) | ||
133 | return opkg_query(output) | ||
134 | |||
135 | def extract(self, pkg, pkg_info): | ||
136 | """ | ||
137 | Returns the path to a tmpdir where resides the contents of a package. | ||
138 | |||
139 | Deleting the tmpdir is responsability of the caller. | ||
140 | |||
141 | This method extracts the common parts for Opkg and Dpkg | ||
142 | """ | ||
143 | |||
144 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
145 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
146 | pkg_path = pkg_info[pkg]["filepath"] | ||
147 | |||
148 | if not os.path.isfile(pkg_path): | ||
149 | bb.fatal("Unable to extract package for '%s'." | ||
150 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
151 | |||
152 | tmp_dir = tempfile.mkdtemp() | ||
153 | current_dir = os.getcwd() | ||
154 | os.chdir(tmp_dir) | ||
155 | data_tar = 'data.tar.xz' | ||
156 | |||
157 | try: | ||
158 | cmd = [ar_cmd, 'x', pkg_path] | ||
159 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
160 | cmd = [tar_cmd, 'xf', data_tar] | ||
161 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
162 | except subprocess.CalledProcessError as e: | ||
163 | bb.utils.remove(tmp_dir, recurse=True) | ||
164 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
165 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
166 | except OSError as e: | ||
167 | bb.utils.remove(tmp_dir, recurse=True) | ||
168 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
169 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
170 | |||
171 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
172 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
173 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
174 | os.chdir(current_dir) | ||
175 | |||
176 | return tmp_dir | ||
177 | |||
178 | def _handle_intercept_failure(self, registered_pkgs): | ||
179 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
180 | 115 | ||
181 | class DpkgPM(OpkgDpkgPM): | 116 | class DpkgPM(OpkgDpkgPM): |
182 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): | 117 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): |
@@ -496,7 +431,7 @@ class DpkgPM(OpkgDpkgPM): | |||
496 | Returns a dictionary with the package info. | 431 | Returns a dictionary with the package info. |
497 | """ | 432 | """ |
498 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) | 433 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) |
499 | pkg_info = super(DpkgPM, self).package_info(pkg, cmd) | 434 | pkg_info = self._common_package_info(cmd) |
500 | 435 | ||
501 | pkg_arch = pkg_info[pkg]["pkgarch"] | 436 | pkg_arch = pkg_info[pkg]["pkgarch"] |
502 | pkg_filename = pkg_info[pkg]["filename"] | 437 | pkg_filename = pkg_info[pkg]["filename"] |
@@ -504,19 +439,3 @@ class DpkgPM(OpkgDpkgPM): | |||
504 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | 439 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) |
505 | 440 | ||
506 | return pkg_info | 441 | return pkg_info |
507 | |||
508 | def extract(self, pkg): | ||
509 | """ | ||
510 | Returns the path to a tmpdir where resides the contents of a package. | ||
511 | |||
512 | Deleting the tmpdir is responsability of the caller. | ||
513 | """ | ||
514 | pkg_info = self.package_info(pkg) | ||
515 | if not pkg_info: | ||
516 | bb.fatal("Unable to get information for package '%s' while " | ||
517 | "trying to extract the package." % pkg) | ||
518 | |||
519 | tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info) | ||
520 | bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz")) | ||
521 | |||
522 | return tmp_dir | ||
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py index 0f0038d00d..3d998e52ff 100644 --- a/meta/lib/oe/package_manager/ipk/__init__.py +++ b/meta/lib/oe/package_manager/ipk/__init__.py | |||
@@ -4,11 +4,11 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | 6 | ||
7 | import glob | ||
8 | import re | 7 | import re |
9 | import shutil | 8 | import shutil |
10 | import subprocess | 9 | import subprocess |
11 | from oe.package_manager import * | 10 | from oe.package_manager import * |
11 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
12 | 12 | ||
13 | class OpkgIndexer(Indexer): | 13 | class OpkgIndexer(Indexer): |
14 | def write_index(self): | 14 | def write_index(self): |
@@ -91,81 +91,6 @@ class PMPkgsList(PkgsList): | |||
91 | return opkg_query(cmd_output) | 91 | return opkg_query(cmd_output) |
92 | 92 | ||
93 | 93 | ||
94 | |||
95 | class OpkgDpkgPM(PackageManager): | ||
96 | def __init__(self, d, target_rootfs): | ||
97 | """ | ||
98 | This is an abstract class. Do not instantiate this directly. | ||
99 | """ | ||
100 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
101 | |||
102 | def package_info(self, pkg, cmd): | ||
103 | """ | ||
104 | Returns a dictionary with the package info. | ||
105 | |||
106 | This method extracts the common parts for Opkg and Dpkg | ||
107 | """ | ||
108 | |||
109 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
110 | if proc.returncode: | ||
111 | bb.fatal("Unable to list available packages. Command '%s' " | ||
112 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
113 | elif proc.stderr: | ||
114 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
115 | |||
116 | return opkg_query(proc.stdout) | ||
117 | |||
118 | def extract(self, pkg, pkg_info): | ||
119 | """ | ||
120 | Returns the path to a tmpdir where resides the contents of a package. | ||
121 | |||
122 | Deleting the tmpdir is responsability of the caller. | ||
123 | |||
124 | This method extracts the common parts for Opkg and Dpkg | ||
125 | """ | ||
126 | |||
127 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
128 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
129 | pkg_path = pkg_info[pkg]["filepath"] | ||
130 | |||
131 | if not os.path.isfile(pkg_path): | ||
132 | bb.fatal("Unable to extract package for '%s'." | ||
133 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
134 | |||
135 | tmp_dir = tempfile.mkdtemp() | ||
136 | current_dir = os.getcwd() | ||
137 | os.chdir(tmp_dir) | ||
138 | |||
139 | try: | ||
140 | cmd = [ar_cmd, 'x', pkg_path] | ||
141 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
142 | data_tar = glob.glob("data.tar.*") | ||
143 | if len(data_tar) != 1: | ||
144 | bb.fatal("Unable to extract %s package. Failed to identify " | ||
145 | "data tarball (found tarballs '%s').", | ||
146 | pkg_path, data_tar) | ||
147 | data_tar = data_tar[0] | ||
148 | cmd = [tar_cmd, 'xf', data_tar] | ||
149 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
150 | except subprocess.CalledProcessError as e: | ||
151 | bb.utils.remove(tmp_dir, recurse=True) | ||
152 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
153 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
154 | except OSError as e: | ||
155 | bb.utils.remove(tmp_dir, recurse=True) | ||
156 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
157 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
158 | |||
159 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
160 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
161 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
162 | os.chdir(current_dir) | ||
163 | |||
164 | return tmp_dir | ||
165 | |||
166 | def _handle_intercept_failure(self, registered_pkgs): | ||
167 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
168 | |||
169 | class OpkgPM(OpkgDpkgPM): | 94 | class OpkgPM(OpkgDpkgPM): |
170 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): | 95 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): |
171 | super(OpkgPM, self).__init__(d, target_rootfs) | 96 | super(OpkgPM, self).__init__(d, target_rootfs) |
@@ -491,7 +416,7 @@ class OpkgPM(OpkgDpkgPM): | |||
491 | Returns a dictionary with the package info. | 416 | Returns a dictionary with the package info. |
492 | """ | 417 | """ |
493 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) | 418 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) |
494 | pkg_info = super(OpkgPM, self).package_info(pkg, cmd) | 419 | pkg_info = self._common_package_info(cmd) |
495 | 420 | ||
496 | pkg_arch = pkg_info[pkg]["arch"] | 421 | pkg_arch = pkg_info[pkg]["arch"] |
497 | pkg_filename = pkg_info[pkg]["filename"] | 422 | pkg_filename = pkg_info[pkg]["filename"] |
@@ -499,19 +424,3 @@ class OpkgPM(OpkgDpkgPM): | |||
499 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | 424 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) |
500 | 425 | ||
501 | return pkg_info | 426 | return pkg_info |
502 | |||
503 | def extract(self, pkg): | ||
504 | """ | ||
505 | Returns the path to a tmpdir where resides the contents of a package. | ||
506 | |||
507 | Deleting the tmpdir is responsability of the caller. | ||
508 | """ | ||
509 | pkg_info = self.package_info(pkg) | ||
510 | if not pkg_info: | ||
511 | bb.fatal("Unable to get information for package '%s' while " | ||
512 | "trying to extract the package." % pkg) | ||
513 | |||
514 | tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info) | ||
515 | bb.utils.remove(os.path.join(tmp_dir, "data.tar.zst")) | ||
516 | |||
517 | return tmp_dir | ||
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py index f40c880af4..323ec5008f 100644 --- a/meta/lib/oe/package_manager/rpm/__init__.py +++ b/meta/lib/oe/package_manager/rpm/__init__.py | |||
@@ -393,8 +393,8 @@ class RpmPM(PackageManager): | |||
393 | # Strip file: prefix | 393 | # Strip file: prefix |
394 | pkg_path = pkg_name[5:] | 394 | pkg_path = pkg_name[5:] |
395 | 395 | ||
396 | cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio") | 396 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") |
397 | rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio") | 397 | rpm2archive_cmd = bb.utils.which(os.getenv("PATH"), "rpm2archive") |
398 | 398 | ||
399 | if not os.path.isfile(pkg_path): | 399 | if not os.path.isfile(pkg_path): |
400 | bb.fatal("Unable to extract package for '%s'." | 400 | bb.fatal("Unable to extract package for '%s'." |
@@ -405,7 +405,7 @@ class RpmPM(PackageManager): | |||
405 | os.chdir(tmp_dir) | 405 | os.chdir(tmp_dir) |
406 | 406 | ||
407 | try: | 407 | try: |
408 | cmd = "%s %s | %s -idmv" % (rpm2cpio_cmd, pkg_path, cpio_cmd) | 408 | cmd = "%s -n %s | %s xv" % (rpm2archive_cmd, pkg_path, tar_cmd) |
409 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | 409 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) |
410 | except subprocess.CalledProcessError as e: | 410 | except subprocess.CalledProcessError as e: |
411 | bb.utils.remove(tmp_dir, recurse=True) | 411 | bb.utils.remove(tmp_dir, recurse=True) |
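The rpm extraction switch above replaces the rpm2cpio | cpio -idmv pipeline with rpm2archive -n | tar xv, which converts the payload to an uncompressed tar stream first. The resulting pipeline in isolation (path hypothetical):

    import subprocess

    pkg_path = "/deploy/rpm/core2_64/foo-1.0-r0.core2_64.rpm"  # hypothetical
    cmd = "rpm2archive -n %s | tar xv" % pkg_path
    subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)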
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index 2d1d6ddeb7..b6a10a930a 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py | |||
@@ -7,6 +7,7 @@ | |||
7 | import codecs | 7 | import codecs |
8 | import os | 8 | import os |
9 | import json | 9 | import json |
10 | import bb.parse | ||
10 | import bb.compress.zstd | 11 | import bb.compress.zstd |
11 | import oe.path | 12 | import oe.path |
12 | 13 | ||
@@ -64,6 +65,7 @@ def read_subpkgdata_dict(pkg, d): | |||
64 | ret[newvar] = subd[var] | 65 | ret[newvar] = subd[var] |
65 | return ret | 66 | return ret |
66 | 67 | ||
68 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
67 | def read_subpkgdata_extended(pkg, d): | 69 | def read_subpkgdata_extended(pkg, d): |
68 | import json | 70 | import json |
69 | import bb.compress.zstd | 71 | import bb.compress.zstd |
@@ -182,6 +184,7 @@ def runtime_mapping_rename(varname, pkg, d): | |||
182 | 184 | ||
183 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) | 185 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) |
184 | 186 | ||
187 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
185 | def emit_pkgdata(pkgfiles, d): | 188 | def emit_pkgdata(pkgfiles, d): |
186 | def process_postinst_on_target(pkg, mlprefix): | 189 | def process_postinst_on_target(pkg, mlprefix): |
187 | pkgval = d.getVar('PKG:%s' % pkg) | 190 | pkgval = d.getVar('PKG:%s' % pkg) |
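The new @bb.parse.vardepsexclude(...) decorator marks a function so the named variables are left out of its computed variable dependencies; here it keeps BB_NUMBER_THREADS, a purely host-side parallelism knob, from influencing task signatures. A minimal sketch of the same pattern (function name hypothetical):

    import bb.parse

    @bb.parse.vardepsexclude("BB_NUMBER_THREADS")
    def do_parallel_work(d):
        # Reading the variable here no longer adds it to the task signature
        nproc = int(d.getVar("BB_NUMBER_THREADS") or 1)
        ...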
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 58c6e34fe8..edd77196ee 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -462,21 +462,23 @@ class GitApplyTree(PatchTree): | |||
462 | return (tmpfile, cmd) | 462 | return (tmpfile, cmd) |
463 | 463 | ||
464 | @staticmethod | 464 | @staticmethod |
465 | def addNote(repo, ref, key, value=None): | 465 | def addNote(repo, ref, key, value=None, commituser=None, commitemail=None): |
466 | note = key + (": %s" % value if value else "") | 466 | note = key + (": %s" % value if value else "") |
467 | notes_ref = GitApplyTree.notes_ref | 467 | notes_ref = GitApplyTree.notes_ref |
468 | runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo) | 468 | runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo) |
469 | runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo) | 469 | runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo) |
470 | runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo) | 470 | runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo) |
471 | runcmd(["git", "notes", "--ref", notes_ref, "append", "-m", note, ref], repo) | 471 | cmd = ["git"] |
472 | GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) | ||
473 | runcmd(cmd + ["notes", "--ref", notes_ref, "append", "-m", note, ref], repo) | ||
472 | 474 | ||
473 | @staticmethod | 475 | @staticmethod |
474 | def removeNote(repo, ref, key): | 476 | def removeNote(repo, ref, key, commituser=None, commitemail=None): |
475 | notes = GitApplyTree.getNotes(repo, ref) | 477 | notes = GitApplyTree.getNotes(repo, ref) |
476 | notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")} | 478 | notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")} |
477 | runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo) | 479 | runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo) |
478 | for note, value in notes.items(): | 480 | for note, value in notes.items(): |
479 | GitApplyTree.addNote(repo, ref, note, value) | 481 | GitApplyTree.addNote(repo, ref, note, value, commituser, commitemail) |
480 | 482 | ||
481 | @staticmethod | 483 | @staticmethod |
482 | def getNotes(repo, ref): | 484 | def getNotes(repo, ref): |
@@ -507,7 +509,7 @@ class GitApplyTree(PatchTree): | |||
507 | GitApplyTree.gitCommandUserOptions(cmd, d=d) | 509 | GitApplyTree.gitCommandUserOptions(cmd, d=d) |
508 | cmd += ["commit", "-m", subject, "--no-verify"] | 510 | cmd += ["commit", "-m", subject, "--no-verify"] |
509 | runcmd(cmd, dir) | 511 | runcmd(cmd, dir) |
510 | GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit) | 512 | GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit, d.getVar('PATCH_GIT_USER_NAME'), d.getVar('PATCH_GIT_USER_EMAIL')) |
511 | 513 | ||
512 | @staticmethod | 514 | @staticmethod |
513 | def extractPatches(tree, startcommits, outdir, paths=None): | 515 | def extractPatches(tree, startcommits, outdir, paths=None): |
@@ -654,7 +656,7 @@ class GitApplyTree(PatchTree): | |||
654 | raise | 656 | raise |
655 | finally: | 657 | finally: |
656 | if patch_applied: | 658 | if patch_applied: |
657 | GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file'])) | 659 | GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']), self.commituser, self.commitemail) |
658 | 660 | ||
659 | 661 | ||
660 | class QuiltTree(PatchSet): | 662 | class QuiltTree(PatchSet): |
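Passing commituser/commitemail through to addNote() lets gitCommandUserOptions() inject the identity as git -c options, so git notes works in environments without a global git config. Roughly what the assembled command looks like (identity values hypothetical):

    cmd = ["git",
           "-c", "user.name=OpenEmbedded",   # from PATCH_GIT_USER_NAME
           "-c", "user.email=oe.patch@oe",   # from PATCH_GIT_USER_EMAIL
           "notes", "--ref", GitApplyTree.notes_ref,
           "append", "-m", "original patch: foo.patch", "HEAD"]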
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py index 5d21cdcbdf..a1efe97d88 100644 --- a/meta/lib/oe/path.py +++ b/meta/lib/oe/path.py | |||
@@ -10,6 +10,8 @@ import shutil | |||
10 | import subprocess | 10 | import subprocess |
11 | import os.path | 11 | import os.path |
12 | 12 | ||
13 | import bb.parse | ||
14 | |||
13 | def join(*paths): | 15 | def join(*paths): |
14 | """Like os.path.join but doesn't treat absolute RHS specially""" | 16 | """Like os.path.join but doesn't treat absolute RHS specially""" |
15 | return os.path.normpath("/".join(paths)) | 17 | return os.path.normpath("/".join(paths)) |
@@ -77,6 +79,7 @@ def replace_absolute_symlinks(basedir, d): | |||
77 | os.remove(path) | 79 | os.remove(path) |
78 | os.symlink(base, path) | 80 | os.symlink(base, path) |
79 | 81 | ||
82 | @bb.parse.vardepsexclude("TOPDIR") | ||
80 | def format_display(path, metadata): | 83 | def format_display(path, metadata): |
81 | """ Prepare a path for display to the user. """ | 84 | """ Prepare a path for display to the user. """ |
82 | rel = relative(metadata.getVar("TOPDIR"), path) | 85 | rel = relative(metadata.getVar("TOPDIR"), path) |
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index f8ae3c743f..cd36cb5070 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py | |||
@@ -4,6 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | 6 | ||
7 | import ast | ||
7 | import os, struct, mmap | 8 | import os, struct, mmap |
8 | 9 | ||
9 | class NotELFFileError(Exception): | 10 | class NotELFFileError(Exception): |
@@ -186,6 +187,20 @@ def write_error(type, error, d): | |||
186 | with open(logfile, "a+") as f: | 187 | with open(logfile, "a+") as f: |
187 | f.write("%s: %s [%s]\n" % (p, error, type)) | 188 | f.write("%s: %s [%s]\n" % (p, error, type)) |
188 | 189 | ||
190 | def handle_error_visitorcode(name, args): | ||
191 | execs = set() | ||
192 | contains = {} | ||
193 | warn = None | ||
194 | if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str): | ||
195 | for i in ["ERROR_QA", "WARN_QA"]: | ||
196 | if i not in contains: | ||
197 | contains[i] = set() | ||
198 | contains[i].add(args[0].value) | ||
199 | else: | ||
200 | warn = args[0] | ||
201 | execs.add(name) | ||
202 | return contains, execs, warn | ||
203 | |||
189 | def handle_error(error_class, error_msg, d): | 204 | def handle_error(error_class, error_msg, d): |
190 | if error_class in (d.getVar("ERROR_QA") or "").split(): | 205 | if error_class in (d.getVar("ERROR_QA") or "").split(): |
191 | write_error(error_class, error_msg, d) | 206 | write_error(error_class, error_msg, d) |
@@ -198,12 +213,7 @@ def handle_error(error_class, error_msg, d): | |||
198 | else: | 213 | else: |
199 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) | 214 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) |
200 | return True | 215 | return True |
201 | 216 | handle_error.visitorcode = handle_error_visitorcode | |
202 | def add_message(messages, section, new_msg): | ||
203 | if section not in messages: | ||
204 | messages[section] = new_msg | ||
205 | else: | ||
206 | messages[section] = messages[section] + "\n" + new_msg | ||
207 | 217 | ||
208 | def exit_with_message_if_errors(message, d): | 218 | def exit_with_message_if_errors(message, d): |
209 | qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) | 219 | qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) |
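Attaching handle_error.visitorcode lets BitBake's code parser special-case calls to oe.qa.handle_error(): when the first argument is a string literal, only a contains-check on ERROR_QA/WARN_QA is recorded instead of a dependency on those variables as a whole. A sketch of what the hook returns for one call:

    import ast

    call = ast.parse('oe.qa.handle_error("ldflags", msg, d)').body[0].value
    contains, execs, warn = handle_error_visitorcode("oe.qa.handle_error", call.args)
    # contains == {'ERROR_QA': {'ldflags'}, 'WARN_QA': {'ldflags'}}
    # execs == {'oe.qa.handle_error'}; warn is None (first argument was a literal)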
diff --git a/meta/lib/oe/qemu.py b/meta/lib/oe/qemu.py new file mode 100644 index 0000000000..769865036c --- /dev/null +++ b/meta/lib/oe/qemu.py | |||
@@ -0,0 +1,54 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | def qemu_target_binary(d): | ||
8 | package_arch = d.getVar("PACKAGE_ARCH") | ||
9 | qemu_target_binary = (d.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "") | ||
10 | if qemu_target_binary: | ||
11 | return qemu_target_binary | ||
12 | |||
13 | target_arch = d.getVar("TARGET_ARCH") | ||
14 | if target_arch in ("i486", "i586", "i686"): | ||
15 | target_arch = "i386" | ||
16 | elif target_arch == "powerpc": | ||
17 | target_arch = "ppc" | ||
18 | elif target_arch == "powerpc64": | ||
19 | target_arch = "ppc64" | ||
20 | elif target_arch == "powerpc64le": | ||
21 | target_arch = "ppc64le" | ||
22 | |||
23 | return "qemu-" + target_arch | ||
24 | |||
25 | def qemu_wrapper_cmdline(d, rootfs_path, library_paths, qemu_options=None): | ||
26 | import string | ||
27 | |||
28 | package_arch = d.getVar("PACKAGE_ARCH") | ||
29 | if package_arch == "all": | ||
30 | return "false" | ||
31 | |||
32 | qemu_binary = qemu_target_binary(d) | ||
33 | if qemu_binary == "qemu-allarch": | ||
34 | qemu_binary = "qemuwrapper" | ||
35 | |||
36 | if qemu_options is None: | ||
37 | qemu_options = d.getVar("QEMU_OPTIONS") or "" | ||
38 | |||
39 | return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\ | ||
40 | + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " " | ||
41 | |||
42 | # The next function returns a string containing the command that is needed | ||
43 | # to run a given binary through qemu. For example, if a postinstall | ||
44 | # scriptlet must run at do_rootfs time but running the postinstall is | ||
45 | # architecture dependent, we can run it through qemu. In the postinstall | ||
46 | # scriptlet, we could use the following: | ||
47 | # | ||
48 | # ${@qemu_run_binary(d, '$D', '/usr/bin/test_app')} [test_app arguments] | ||
49 | # | ||
50 | def qemu_run_binary(d, rootfs_path, binary): | ||
51 | libdir = rootfs_path + d.getVar("libdir", False) | ||
52 | base_libdir = rootfs_path + d.getVar("base_libdir", False) | ||
53 | |||
54 | return qemu_wrapper_cmdline(d, rootfs_path, [libdir, base_libdir]) + rootfs_path + binary | ||
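For illustration, the string these helpers produce for a hypothetical aarch64 rootfs (exact paths depend on how libdir/base_libdir expand):

    cmdline = qemu_run_binary(d, "$D", "/usr/bin/test_app")
    # roughly: "PSEUDO_UNLOAD=1 qemu-aarch64 <QEMU_OPTIONS> -L $D"
    #          " -E LD_LIBRARY_PATH=$D/usr/lib:$D/lib $D/usr/bin/test_app"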
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index 2d69a33113..044f1bfa61 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py | |||
@@ -1070,10 +1070,15 @@ def get_recipe_upstream_version(rd): | |||
1070 | ud = bb.fetch2.FetchData(src_uri, rd) | 1070 | ud = bb.fetch2.FetchData(src_uri, rd) |
1071 | if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": | 1071 | if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": |
1072 | bb.fetch2.get_srcrev(rd) | 1072 | bb.fetch2.get_srcrev(rd) |
1073 | revision = ud.method.latest_revision(ud, rd, 'default') | 1073 | upversion = None |
1074 | upversion = pv | 1074 | revision = None |
1075 | if revision != rd.getVar("SRCREV"): | 1075 | try: |
1076 | upversion = upversion + "-new-commits-available" | 1076 | revision = ud.method.latest_revision(ud, rd, 'default') |
1077 | upversion = pv | ||
1078 | if revision != rd.getVar("SRCREV"): | ||
1079 | upversion = upversion + "-new-commits-available" | ||
1080 | except bb.fetch2.FetchError as e: | ||
1081 | bb.warn("Unable to obtain latest revision: {}".format(e)) | ||
1077 | else: | 1082 | else: |
1078 | pupver = ud.method.latest_versionstring(ud, rd) | 1083 | pupver = ud.method.latest_versionstring(ud, rd) |
1079 | (upversion, revision) = pupver | 1084 | (upversion, revision) = pupver |
@@ -1112,7 +1117,7 @@ def _get_recipe_upgrade_status(data): | |||
1112 | maintainer = data.getVar('RECIPE_MAINTAINER') | 1117 | maintainer = data.getVar('RECIPE_MAINTAINER') |
1113 | no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') | 1118 | no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') |
1114 | 1119 | ||
1115 | return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason) | 1120 | return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason} |
1116 | 1121 | ||
1117 | def get_recipe_upgrade_status(recipes=None): | 1122 | def get_recipe_upgrade_status(recipes=None): |
1118 | pkgs_list = [] | 1123 | pkgs_list = [] |
@@ -1154,6 +1159,7 @@ def get_recipe_upgrade_status(recipes=None): | |||
1154 | if not recipes: | 1159 | if not recipes: |
1155 | recipes = tinfoil.all_recipe_files(variants=False) | 1160 | recipes = tinfoil.all_recipe_files(variants=False) |
1156 | 1161 | ||
1162 | recipeincludes = {} | ||
1157 | for fn in recipes: | 1163 | for fn in recipes: |
1158 | try: | 1164 | try: |
1159 | if fn.startswith("/"): | 1165 | if fn.startswith("/"): |
@@ -1178,8 +1184,65 @@ def get_recipe_upgrade_status(recipes=None): | |||
1178 | 1184 | ||
1179 | data_copy_list.append(data_copy) | 1185 | data_copy_list.append(data_copy) |
1180 | 1186 | ||
1187 | recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')} | ||
1188 | |||
1181 | from concurrent.futures import ProcessPoolExecutor | 1189 | from concurrent.futures import ProcessPoolExecutor |
1182 | with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: | 1190 | with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: |
1183 | pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) | 1191 | pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) |
1184 | 1192 | ||
1185 | return pkgs_list | 1193 | return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes)) |
1194 | |||
1195 | def get_common_include_recipes(): | ||
1196 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
1197 | tinfoil.prepare(config_only=False) | ||
1198 | |||
1199 | recipes = tinfoil.all_recipe_files(variants=False) | ||
1200 | |||
1201 | recipeincludes = {} | ||
1202 | for fn in recipes: | ||
1203 | data = tinfoil.parse_recipe_file(fn) | ||
1204 | recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')} | ||
1205 | return _get_common_include_recipes(recipeincludes) | ||
1206 | |||
1207 | def _get_common_include_recipes(recipeincludes_all): | ||
1208 | recipeincludes = {} | ||
1209 | for fn,data in recipeincludes_all.items(): | ||
1210 | bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn] | ||
1211 | if bbincluded_filtered: | ||
1212 | recipeincludes[data['pn']] = bbincluded_filtered | ||
1213 | |||
1214 | recipeincludes_inverted = {} | ||
1215 | for k,v in recipeincludes.items(): | ||
1216 | for i in v: | ||
1217 | recipeincludes_inverted.setdefault(i,set()).add(k) | ||
1218 | |||
1219 | recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1} | ||
1220 | |||
1221 | recipes_with_shared_includes = list() | ||
1222 | for v in recipeincludes_inverted_filtered.values(): | ||
1223 | recipeset = v | ||
1224 | for v1 in recipeincludes_inverted_filtered.values(): | ||
1225 | if recipeset.intersection(v1): | ||
1226 | recipeset.update(v1) | ||
1227 | if recipeset not in recipes_with_shared_includes: | ||
1228 | recipes_with_shared_includes.append(recipeset) | ||
1229 | |||
1230 | return recipes_with_shared_includes | ||
1231 | |||
1232 | def _group_recipes(recipes, groups): | ||
1233 | recipedict = {} | ||
1234 | for r in recipes: | ||
1235 | recipedict[r['pn']] = r | ||
1236 | |||
1237 | recipegroups = [] | ||
1238 | for g in groups: | ||
1239 | recipeset = [] | ||
1240 | for r in g: | ||
1241 | if r in recipedict.keys(): | ||
1242 | recipeset.append(recipedict[r]) | ||
1243 | del recipedict[r] | ||
1244 | recipegroups.append(recipeset) | ||
1245 | |||
1246 | for r in recipedict.values(): | ||
1247 | recipegroups.append([r]) | ||
1248 | return recipegroups | ||
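A worked example (hypothetical recipes) of the grouping introduced above: recipes that include a common .inc from their own directory are reported as one group, and everything else is emitted in a group of its own.

    recipeincludes = {
        "/meta/recipes-foo/foo/foo_1.0.bb":
            {"bbincluded": ["/meta/recipes-foo/foo/foo.inc"], "pn": "foo"},
        "/meta/recipes-foo/foo/foo-native_1.0.bb":
            {"bbincluded": ["/meta/recipes-foo/foo/foo.inc"], "pn": "foo-native"},
        "/meta/recipes-bar/bar/bar_2.0.bb":
            {"bbincluded": [], "pn": "bar"},
    }
    print(_get_common_include_recipes(recipeincludes))
    # -> [{'foo', 'foo-native'}]; 'bar' is later emitted as a single-recipe group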
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py index 1957c97434..0270024a83 100644 --- a/meta/lib/oe/reproducible.py +++ b/meta/lib/oe/reproducible.py | |||
@@ -47,7 +47,7 @@ import bb | |||
47 | # 2. If there is a git checkout, use the last git commit timestamp. | 47 | # 2. If there is a git checkout, use the last git commit timestamp. |
48 | # Git does not preserve file timestamps on checkout. | 48 | # Git does not preserve file timestamps on checkout. |
49 | # | 49 | # |
50 | # 3. Use the mtime of "known" files such as NEWS, CHANGLELOG, ... | 50 | # 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ... |
51 | # This works for well-kept repositories distributed via tarball. | 51 | # This works for well-kept repositories distributed via tarball. |
52 | # | 52 | # |
53 | # 4. Use the modification time of the youngest file in the source tree, if | 53 | # 4. Use the modification time of the youngest file in the source tree, if |
@@ -75,10 +75,11 @@ def get_source_date_epoch_from_known_files(d, sourcedir): | |||
75 | return source_date_epoch | 75 | return source_date_epoch |
76 | 76 | ||
77 | def find_git_folder(d, sourcedir): | 77 | def find_git_folder(d, sourcedir): |
78 | # First guess: UNPACKDIR/git | 78 | # First guess: UNPACKDIR/BB_GIT_DEFAULT_DESTSUFFIX |
79 | # This is the default git fetcher unpack path | 79 | # This is the default git fetcher unpack path |
80 | unpackdir = d.getVar('UNPACKDIR') | 80 | unpackdir = d.getVar('UNPACKDIR') |
81 | gitpath = os.path.join(unpackdir, "git/.git") | 81 | default_destsuffix = d.getVar('BB_GIT_DEFAULT_DESTSUFFIX') |
82 | gitpath = os.path.join(unpackdir, default_destsuffix, ".git") | ||
82 | if os.path.isdir(gitpath): | 83 | if os.path.isdir(gitpath): |
83 | return gitpath | 84 | return gitpath |
84 | 85 | ||
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index 8cd48f9450..14befac8fa 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py | |||
@@ -199,12 +199,12 @@ class Rootfs(object, metaclass=ABCMeta): | |||
199 | if command in commands: | 199 | if command in commands: |
200 | commands.remove(command) | 200 | commands.remove(command) |
201 | commands.append(command) | 201 | commands.append(command) |
202 | return "".join(commands) | 202 | return " ".join(commands) |
203 | 203 | ||
204 | # We want this to run as late as possible, in particular after | 204 | # We want this to run as late as possible, in particular after |
205 | # systemd_sysusers_create and set_user_group. Using :append is not enough | 205 | # systemd_sysusers_create and set_user_group. Using :append is not enough |
206 | make_last("tidy_shadowutils_files", post_process_cmds) | 206 | post_process_cmds = make_last("tidy_shadowutils_files", post_process_cmds) |
207 | make_last("rootfs_reproducible", post_process_cmds) | 207 | post_process_cmds = make_last("rootfs_reproducible", post_process_cmds) |
208 | 208 | ||
209 | execute_pre_post_process(self.d, pre_process_cmds) | 209 | execute_pre_post_process(self.d, pre_process_cmds) |
210 | 210 | ||
@@ -269,7 +269,11 @@ class Rootfs(object, metaclass=ABCMeta): | |||
269 | self.pm.remove(["run-postinsts"]) | 269 | self.pm.remove(["run-postinsts"]) |
270 | 270 | ||
271 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", | 271 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", |
272 | True, False, self.d) and \ | ||
273 | not bb.utils.contains("IMAGE_FEATURES", | ||
274 | "read-only-rootfs-delayed-postinsts", | ||
272 | True, False, self.d) | 275 | True, False, self.d) |
276 | |||
273 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') | 277 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') |
274 | 278 | ||
275 | if image_rorfs or image_rorfs_force == "1": | 279 | if image_rorfs or image_rorfs_force == "1": |
@@ -358,11 +362,12 @@ class Rootfs(object, metaclass=ABCMeta): | |||
358 | 362 | ||
359 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) | 363 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) |
360 | 364 | ||
361 | bb.utils.mkdirhier(versioned_modules_dir) | 365 | if os.path.exists(versioned_modules_dir): |
362 | 366 | bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir) | |
363 | bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir) | 367 | if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]): |
364 | if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]): | 368 | bb.fatal("Kernel modules dependency generation failed") |
365 | bb.fatal("Kernel modules dependency generation failed") | 369 | else: |
370 | bb.note("Not running depmodwrapper for %s since directory does not exist" % versioned_modules_dir) | ||
366 | 371 | ||
367 | """ | 372 | """ |
368 | Create devfs: | 373 | Create devfs: |
@@ -423,12 +428,3 @@ def image_list_installed_packages(d, rootfs_dir=None): | |||
423 | import importlib | 428 | import importlib |
424 | cls = importlib.import_module('oe.package_manager.' + img_type) | 429 | cls = importlib.import_module('oe.package_manager.' + img_type) |
425 | return cls.PMPkgsList(d, rootfs_dir).list_pkgs() | 430 | return cls.PMPkgsList(d, rootfs_dir).list_pkgs() |
426 | |||
427 | if __name__ == "__main__": | ||
428 | """ | ||
429 | We should be able to run this as a standalone script, from outside bitbake | ||
430 | environment. | ||
431 | """ | ||
432 | """ | ||
433 | TBD | ||
434 | """ | ||
diff --git a/meta/lib/rootfspostcommands.py b/meta/lib/oe/rootfspostcommands.py index 5386eea409..5386eea409 100644 --- a/meta/lib/rootfspostcommands.py +++ b/meta/lib/oe/rootfspostcommands.py | |||
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py index 185553eeeb..1dc9cf150d 100644 --- a/meta/lib/oe/rust.py +++ b/meta/lib/oe/rust.py | |||
@@ -8,6 +8,4 @@ | |||
8 | def arch_to_rust_arch(arch): | 8 | def arch_to_rust_arch(arch): |
9 | if arch == "ppc64le": | 9 | if arch == "ppc64le": |
10 | return "powerpc64le" | 10 | return "powerpc64le" |
11 | if arch in ('riscv32', 'riscv64'): | ||
12 | return arch + 'gc' | ||
13 | return arch | 11 | return arch |
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py new file mode 100644 index 0000000000..227ac51877 --- /dev/null +++ b/meta/lib/oe/sbom30.py | |||
@@ -0,0 +1,1096 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | from pathlib import Path | ||
8 | |||
9 | import oe.spdx30 | ||
10 | import bb | ||
11 | import re | ||
12 | import hashlib | ||
13 | import uuid | ||
14 | import os | ||
15 | import oe.spdx_common | ||
16 | from datetime import datetime, timezone | ||
17 | |||
18 | OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/" | ||
19 | |||
20 | VEX_VERSION = "1.0.0" | ||
21 | |||
22 | SPDX_BUILD_TYPE = "http://openembedded.org/bitbake" | ||
23 | |||
24 | OE_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/by-doc-hash/" | ||
25 | OE_DOC_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/doc/" | ||
26 | |||
27 | |||
28 | @oe.spdx30.register(OE_SPDX_BASE + "id-alias") | ||
29 | class OEIdAliasExtension(oe.spdx30.extension_Extension): | ||
30 | """ | ||
31 | This extension allows an Element to provide an internal alias for the SPDX | ||
32 | ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects | ||
33 | created have a unique UUID namespace and the unihash of the task encoded in | ||
34 | their SPDX ID. However, this causes a problem for referencing documents | ||
35 | across recipes, since the taskhash of a dependency may not factor into the | ||
36 | taskhash of the current task and thus the current task won't rebuild and | ||
37 | see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and | ||
38 | tasks). | ||
39 | |||
40 | To help work around this, this extension provides a non-unique alias for an | ||
41 | Element by which it can be referenced from other tasks/recipes. When a | ||
42 | final SBoM is created, references to these aliases will be replaced with | ||
43 | the actual unique SPDX ID. | ||
44 | |||
45 | Most Elements will automatically get an alias created when they are written | ||
46 | out if they do not already have one. To suppress the creation of an alias, | ||
47 | add an extension with a blank `alias` property. | ||
48 | |||
49 | |||
50 | It is an internal extension that should be removed when writing out a final | ||
51 | SBoM. | ||
52 | """ | ||
53 | |||
54 | CLOSED = True | ||
55 | INTERNAL = True | ||
56 | |||
57 | @classmethod | ||
58 | def _register_props(cls): | ||
59 | super()._register_props() | ||
60 | cls._add_property( | ||
61 | "alias", | ||
62 | oe.spdx30.StringProp(), | ||
63 | OE_SPDX_BASE + "alias", | ||
64 | max_count=1, | ||
65 | ) | ||
66 | |||
67 | cls._add_property( | ||
68 | "link_name", | ||
69 | oe.spdx30.StringProp(), | ||
70 | OE_SPDX_BASE + "link-name", | ||
71 | max_count=1, | ||
72 | ) | ||
73 | |||
74 | |||
75 | @oe.spdx30.register(OE_SPDX_BASE + "file-name-alias") | ||
76 | class OEFileNameAliasExtension(oe.spdx30.extension_Extension): | ||
77 | CLOSED = True | ||
78 | INTERNAL = True | ||
79 | |||
80 | @classmethod | ||
81 | def _register_props(cls): | ||
82 | super()._register_props() | ||
83 | cls._add_property( | ||
84 | "aliases", | ||
85 | oe.spdx30.ListProp(oe.spdx30.StringProp()), | ||
86 | OE_SPDX_BASE + "filename-alias", | ||
87 | ) | ||
88 | |||
89 | |||
90 | @oe.spdx30.register(OE_SPDX_BASE + "license-scanned") | ||
91 | class OELicenseScannedExtension(oe.spdx30.extension_Extension): | ||
92 | """ | ||
93 | The presence of this extension means the file has already been scanned for | ||
94 | license information | ||
95 | """ | ||
96 | |||
97 | CLOSED = True | ||
98 | INTERNAL = True | ||
99 | |||
100 | |||
101 | @oe.spdx30.register(OE_SPDX_BASE + "document-extension") | ||
102 | class OEDocumentExtension(oe.spdx30.extension_Extension): | ||
103 | """ | ||
104 | This extension is added to a SpdxDocument to indicate various useful bits | ||
105 | of information about its contents | ||
106 | """ | ||
107 | |||
108 | CLOSED = True | ||
109 | |||
110 | @classmethod | ||
111 | def _register_props(cls): | ||
112 | super()._register_props() | ||
113 | cls._add_property( | ||
114 | "is_native", | ||
115 | oe.spdx30.BooleanProp(), | ||
116 | OE_SPDX_BASE + "is-native", | ||
117 | max_count=1, | ||
118 | ) | ||
119 | |||
120 | |||
121 | def spdxid_hash(*items): | ||
122 | h = hashlib.md5() | ||
123 | for i in items: | ||
124 | if isinstance(i, oe.spdx30.Element): | ||
125 | h.update(i._id.encode("utf-8")) | ||
126 | else: | ||
127 | h.update(i.encode("utf-8")) | ||
128 | return h.hexdigest() | ||
129 | |||
130 | |||
131 | def spdx_sde(d): | ||
132 | sde = d.getVar("SOURCE_DATE_EPOCH") | ||
133 | if not sde: | ||
134 | return datetime.now(timezone.utc) | ||
135 | |||
136 | return datetime.fromtimestamp(int(sde), timezone.utc) | ||
137 | |||
138 | |||
139 | def get_element_link_id(e): | ||
140 | """ | ||
141 | Get the string ID which should be used to link to an Element. If the | ||
142 | element has an alias, that will be preferred, otherwise its SPDX ID will be | ||
143 | used. | ||
144 | """ | ||
145 | ext = get_alias(e) | ||
146 | if ext is not None and ext.alias: | ||
147 | return ext.alias | ||
148 | return e._id | ||
149 | |||
150 | |||
151 | def get_alias(obj): | ||
152 | for ext in obj.extension: | ||
153 | if not isinstance(ext, OEIdAliasExtension): | ||
154 | continue | ||
155 | return ext | ||
156 | |||
157 | return None | ||
158 | |||
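A hedged illustration of the alias mechanism described in the OEIdAliasExtension docstring: an Element carrying an alias extension is linked via the stable alias, anything else via its unihash-specific SPDX ID. The IDs below are made up.

    e = oe.spdx30.software_Package(_id="http://example.org/ns/pkg/<unihash>/busybox")
    assert get_element_link_id(e) == "http://example.org/ns/pkg/<unihash>/busybox"

    e.extension.append(OEIdAliasExtension(alias=OE_ALIAS_PREFIX + "abc123/busybox"))
    assert get_element_link_id(e) == OE_ALIAS_PREFIX + "abc123/busybox"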
159 | |||
160 | def hash_id(_id): | ||
161 | return hashlib.sha256(_id.encode("utf-8")).hexdigest() | ||
162 | |||
163 | |||
164 | def to_list(l): | ||
165 | if isinstance(l, set): | ||
166 | l = sorted(list(l)) | ||
167 | |||
168 | if not isinstance(l, (list, tuple)): | ||
169 | raise TypeError("Must be a list or tuple. Got %s" % type(l)) | ||
170 | |||
171 | return l | ||
172 | |||
173 | |||
174 | class ObjectSet(oe.spdx30.SHACLObjectSet): | ||
175 | def __init__(self, d): | ||
176 | super().__init__() | ||
177 | self.d = d | ||
178 | self.alias_prefix = None | ||
179 | |||
180 | def create_index(self): | ||
181 | self.by_sha256_hash = {} | ||
182 | super().create_index() | ||
183 | |||
184 | def add_index(self, obj): | ||
185 | # Check that all elements are given an ID before being inserted | ||
186 | if isinstance(obj, oe.spdx30.Element): | ||
187 | if not obj._id: | ||
188 | raise ValueError("Element missing ID") | ||
189 | |||
190 | alias_ext = get_alias(obj) | ||
191 | if alias_ext is not None and alias_ext.alias: | ||
192 | self.obj_by_id[alias_ext.alias] = obj | ||
193 | |||
194 | for v in obj.verifiedUsing: | ||
195 | if not isinstance(v, oe.spdx30.Hash): | ||
196 | continue | ||
197 | |||
198 | if v.algorithm != oe.spdx30.HashAlgorithm.sha256: | ||
199 | continue | ||
200 | |||
201 | self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj) | ||
202 | |||
203 | super().add_index(obj) | ||
204 | if isinstance(obj, oe.spdx30.SpdxDocument): | ||
205 | self.doc = obj | ||
206 | alias_ext = get_alias(obj) | ||
207 | if alias_ext is not None and alias_ext.alias: | ||
208 | self.alias_prefix = OE_ALIAS_PREFIX + hash_id(alias_ext.alias) + "/" | ||
209 | |||
210 | def __filter_obj(self, obj, attr_filter): | ||
211 | return all(getattr(obj, k) == v for k, v in attr_filter.items()) | ||
212 | |||
213 | def foreach_filter(self, typ, *, match_subclass=True, **attr_filter): | ||
214 | for obj in self.foreach_type(typ, match_subclass=match_subclass): | ||
215 | if self.__filter_obj(obj, attr_filter): | ||
216 | yield obj | ||
217 | |||
218 | def find_filter(self, typ, *, match_subclass=True, **attr_filter): | ||
219 | for obj in self.foreach_filter( | ||
220 | typ, match_subclass=match_subclass, **attr_filter | ||
221 | ): | ||
222 | return obj | ||
223 | return None | ||
224 | |||
225 | def foreach_root(self, typ, **attr_filter): | ||
226 | for obj in self.doc.rootElement: | ||
227 | if not isinstance(obj, typ): | ||
228 | continue | ||
229 | |||
230 | if self.__filter_obj(obj, attr_filter): | ||
231 | yield obj | ||
232 | |||
233 | def find_root(self, typ, **attr_filter): | ||
234 | for obj in self.foreach_root(typ, **attr_filter): | ||
235 | return obj | ||
236 | return None | ||
237 | |||
238 | def add_root(self, obj): | ||
239 | self.add(obj) | ||
240 | self.doc.rootElement.append(obj) | ||
241 | return obj | ||
242 | |||
243 | def is_native(self): | ||
244 | for e in self.doc.extension: | ||
245 | if not isinstance(e, oe.sbom30.OEDocumentExtension): | ||
246 | continue | ||
247 | |||
248 | if e.is_native is not None: | ||
249 | return e.is_native | ||
250 | |||
251 | return False | ||
252 | |||
253 | def set_is_native(self, is_native): | ||
254 | for e in self.doc.extension: | ||
255 | if not isinstance(e, oe.sbom30.OEDocumentExtension): | ||
256 | continue | ||
257 | |||
258 | e.is_native = is_native | ||
259 | return | ||
260 | |||
261 | if is_native: | ||
262 | self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True)) | ||
263 | |||
264 | def add_aliases(self): | ||
265 | for o in self.foreach_type(oe.spdx30.Element): | ||
266 | self.set_element_alias(o) | ||
267 | |||
268 | def new_alias_id(self, obj, replace): | ||
269 | unihash = self.d.getVar("BB_UNIHASH") | ||
270 | namespace = self.get_namespace() | ||
271 | if unihash not in obj._id: | ||
272 | bb.warn(f"Unihash {unihash} not found in {obj._id}") | ||
273 | return None | ||
274 | |||
275 | if namespace not in obj._id: | ||
276 | bb.warn(f"Namespace {namespace} not found in {obj._id}") | ||
277 | return None | ||
278 | |||
279 | return obj._id.replace(unihash, "UNIHASH").replace( | ||
280 | namespace, replace + self.d.getVar("PN") | ||
281 | ) | ||
282 | |||
283 | def remove_internal_extensions(self): | ||
284 | def remove(o): | ||
285 | o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)] | ||
286 | |||
287 | for o in self.foreach_type(oe.spdx30.Element): | ||
288 | remove(o) | ||
289 | |||
290 | if self.doc: | ||
291 | remove(self.doc) | ||
292 | |||
293 | def get_namespace(self): | ||
294 | namespace_uuid = uuid.uuid5( | ||
295 | uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE") | ||
296 | ) | ||
297 | pn = self.d.getVar("PN") | ||
298 | return "%s/%s-%s" % ( | ||
299 | self.d.getVar("SPDX_NAMESPACE_PREFIX"), | ||
300 | pn, | ||
301 | str(uuid.uuid5(namespace_uuid, pn)), | ||
302 | ) | ||
303 | |||
304 | def set_element_alias(self, e): | ||
305 | if not e._id or e._id.startswith("_:"): | ||
306 | return | ||
307 | |||
308 | alias_ext = get_alias(e) | ||
309 | if alias_ext is None: | ||
310 | alias_id = self.new_alias_id(e, self.alias_prefix) | ||
311 | if alias_id is not None: | ||
312 | e.extension.append(OEIdAliasExtension(alias=alias_id)) | ||
313 | elif ( | ||
314 | alias_ext.alias | ||
315 | and not isinstance(e, oe.spdx30.SpdxDocument) | ||
316 | and not alias_ext.alias.startswith(self.alias_prefix) | ||
317 | ): | ||
318 | bb.warn( | ||
319 | f"Element {e._id} has alias {alias_ext.alias}, but it should have prefix {self.alias_prefix}" | ||
320 | ) | ||
321 | |||
322 | def new_spdxid(self, *suffix, include_unihash=True): | ||
323 | items = [self.get_namespace()] | ||
324 | if include_unihash: | ||
325 | unihash = self.d.getVar("BB_UNIHASH") | ||
326 | items.append(unihash) | ||
327 | items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix) | ||
328 | return "/".join(items) | ||
329 | |||
330 | def new_import(self, key): | ||
331 | base = f"SPDX_IMPORTS_{key}" | ||
332 | spdxid = self.d.getVar(f"{base}_spdxid") | ||
333 | if not spdxid: | ||
334 | bb.fatal(f"{key} is not a valid SPDX_IMPORTS key") | ||
335 | |||
336 | for i in self.doc.import_: | ||
337 | if i.externalSpdxId == spdxid: | ||
338 | # Already imported | ||
339 | return spdxid | ||
340 | |||
341 | m = oe.spdx30.ExternalMap(externalSpdxId=spdxid) | ||
342 | |||
343 | uri = self.d.getVar(f"{base}_uri") | ||
344 | if uri: | ||
345 | m.locationHint = uri | ||
346 | |||
347 | for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items(): | ||
348 | value = self.d.getVar(f"{base}_hash_{pyname}") | ||
349 | if value: | ||
350 | m.verifiedUsing.append( | ||
351 | oe.spdx30.Hash( | ||
352 | algorithm=algorithm, | ||
353 | hashValue=value, | ||
354 | ) | ||
355 | ) | ||
356 | |||
357 | self.doc.import_.append(m) | ||
358 | return spdxid | ||
359 | |||
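# Usage sketch: new_import("mydistro") reads variables of the following shape
# from the configuration (the key and values here are illustrative, not
# shipped defaults):
#
#   SPDX_IMPORTS_mydistro_spdxid = "https://example.com/dist.spdx.json#pkg"
#   SPDX_IMPORTS_mydistro_uri = "https://example.com/dist.spdx.json"
#   SPDX_IMPORTS_mydistro_hash_sha256 = "<sha256 of the referenced document>"
#
# The _uri and _hash_* entries are optional; they populate the ExternalMap's
# locationHint and verifiedUsing fields respectively.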
360 | def new_agent(self, varname, *, creation_info=None, add=True): | ||
361 | ref_varname = self.d.getVar(f"{varname}_ref") | ||
362 | if ref_varname: | ||
363 | if ref_varname == varname: | ||
364 | bb.fatal(f"{varname} cannot reference itself") | ||
365 | return self.new_agent(ref_varname, creation_info=creation_info, add=add) | ||
366 | |||
367 | import_key = self.d.getVar(f"{varname}_import") | ||
368 | if import_key: | ||
369 | return self.new_import(import_key) | ||
370 | |||
371 | name = self.d.getVar(f"{varname}_name") | ||
372 | if not name: | ||
373 | return None | ||
374 | |||
375 | spdxid = self.new_spdxid("agent", name) | ||
376 | agent = self.find_by_id(spdxid) | ||
377 | if agent is not None: | ||
378 | return agent | ||
379 | |||
380 | agent_type = self.d.getVar("%s_type" % varname) | ||
381 | if agent_type == "person": | ||
382 | agent = oe.spdx30.Person() | ||
383 | elif agent_type == "software": | ||
384 | agent = oe.spdx30.SoftwareAgent() | ||
385 | elif agent_type == "organization": | ||
386 | agent = oe.spdx30.Organization() | ||
387 | elif not agent_type or agent_type == "agent": | ||
388 | agent = oe.spdx30.Agent() | ||
389 | else: | ||
390 | bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname)) | ||
391 | |||
392 | agent._id = spdxid | ||
393 | agent.creationInfo = creation_info or self.doc.creationInfo | ||
394 | agent.name = name | ||
395 | |||
396 | comment = self.d.getVar("%s_comment" % varname) | ||
397 | if comment: | ||
398 | agent.comment = comment | ||
399 | |||
400 | for ( | ||
401 | pyname, | ||
402 | idtype, | ||
403 | ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items(): | ||
404 | value = self.d.getVar("%s_id_%s" % (varname, pyname)) | ||
405 | if value: | ||
406 | agent.externalIdentifier.append( | ||
407 | oe.spdx30.ExternalIdentifier( | ||
408 | externalIdentifierType=idtype, | ||
409 | identifier=value, | ||
410 | ) | ||
411 | ) | ||
412 | |||
413 | if add: | ||
414 | self.add(agent) | ||
415 | |||
416 | return agent | ||
417 | |||
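# Usage sketch, mirroring the variables new_agent() consumes (names and
# values are example assumptions): for varname "SPDX_AUTHORS_acme" it reads
#
#   SPDX_AUTHORS_acme_name = "ACME Corp"
#   SPDX_AUTHORS_acme_type = "organization"   # person|software|organization|agent
#   SPDX_AUTHORS_acme_comment = "Build operator"
#
# Alternatively, a _ref suffix delegates to another variable, and _import
# pulls the agent in from SPDX_IMPORTS instead of creating one.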
418 | def new_creation_info(self): | ||
419 | creation_info = oe.spdx30.CreationInfo() | ||
420 | |||
421 | name = "%s %s" % ( | ||
422 | self.d.getVar("SPDX_TOOL_NAME"), | ||
423 | self.d.getVar("SPDX_TOOL_VERSION"), | ||
424 | ) | ||
425 | tool = self.add( | ||
426 | oe.spdx30.Tool( | ||
427 | _id=self.new_spdxid("tool", name), | ||
428 | creationInfo=creation_info, | ||
429 | name=name, | ||
430 | ) | ||
431 | ) | ||
432 | |||
433 | authors = [] | ||
434 | for a in self.d.getVar("SPDX_AUTHORS").split(): | ||
435 | varname = "SPDX_AUTHORS_%s" % a | ||
436 | author = self.new_agent(varname, creation_info=creation_info) | ||
437 | |||
438 | if not author: | ||
439 | bb.fatal("Unable to find or create author %s" % a) | ||
440 | |||
441 | authors.append(author) | ||
442 | |||
443 | creation_info.created = spdx_sde(self.d) | ||
444 | creation_info.specVersion = self.d.getVar("SPDX_VERSION") | ||
445 | creation_info.createdBy = authors | ||
446 | creation_info.createdUsing = [tool] | ||
447 | |||
448 | return creation_info | ||
449 | |||
450 | def copy_creation_info(self, copy): | ||
451 | c = oe.spdx30.CreationInfo( | ||
452 | created=spdx_sde(self.d), | ||
453 | specVersion=self.d.getVar("SPDX_VERSION"), | ||
454 | ) | ||
455 | |||
456 | for author in copy.createdBy: | ||
457 | if isinstance(author, str): | ||
458 | c.createdBy.append(author) | ||
459 | else: | ||
460 | c.createdBy.append(author._id) | ||
461 | |||
462 | for tool in copy.createdUsing: | ||
463 | if isinstance(tool, str): | ||
464 | c.createdUsing.append(tool) | ||
465 | else: | ||
466 | c.createdUsing.append(tool._id) | ||
467 | |||
468 | return c | ||
469 | |||
470 | def new_annotation(self, subject, comment, typ): | ||
471 | return self.add( | ||
472 | oe.spdx30.Annotation( | ||
473 | _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)), | ||
474 | creationInfo=self.doc.creationInfo, | ||
475 | annotationType=typ, | ||
476 | subject=subject, | ||
477 | statement=comment, | ||
478 | ) | ||
479 | ) | ||
480 | |||
481 | def _new_relationship( | ||
482 | self, | ||
483 | cls, | ||
484 | from_, | ||
485 | typ, | ||
486 | to, | ||
487 | *, | ||
488 | spdxid_name="relationship", | ||
489 | **props, | ||
490 | ): | ||
491 | from_ = to_list(from_) | ||
492 | to = to_list(to) | ||
493 | |||
494 | if not from_: | ||
495 | return [] | ||
496 | |||
497 | if not to: | ||
498 | to = [oe.spdx30.IndividualElement.NoneElement] | ||
499 | |||
500 | ret = [] | ||
501 | |||
502 | for f in from_: | ||
503 | hash_args = [typ, f] | ||
504 | for k in sorted(props.keys()): | ||
505 | hash_args.append(props[k]) | ||
506 | hash_args.extend(to) | ||
507 | |||
508 | relationship = self.add( | ||
509 | cls( | ||
510 | _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)), | ||
511 | creationInfo=self.doc.creationInfo, | ||
512 | from_=f, | ||
513 | relationshipType=typ, | ||
514 | to=to, | ||
515 | **props, | ||
516 | ) | ||
517 | ) | ||
518 | ret.append(relationship) | ||
519 | |||
520 | return ret | ||
521 | |||
522 | def new_relationship(self, from_, typ, to): | ||
523 | return self._new_relationship(oe.spdx30.Relationship, from_, typ, to) | ||
524 | |||
525 | def new_scoped_relationship(self, from_, typ, scope, to): | ||
526 | return self._new_relationship( | ||
527 | oe.spdx30.LifecycleScopedRelationship, | ||
528 | from_, | ||
529 | typ, | ||
530 | to, | ||
531 | scope=scope, | ||
532 | ) | ||
533 | |||
534 | def new_license_expression( | ||
535 | self, license_expression, license_data, license_text_map={} | ||
536 | ): | ||
537 | license_list_version = license_data["licenseListVersion"] | ||
538 | # SPDX 3 requires that the license list version be a semver | ||
539 | # MAJOR.MINOR.MICRO, but the actual license list version might be | ||
540 | # MAJOR.MINOR on some older versions. As such, manually append a .0 | ||
541 | # micro version if it's missing to keep SPDX happy | ||
542 | if license_list_version.count(".") < 2: | ||
543 | license_list_version += ".0" | ||
544 | |||
545 | spdxid = [ | ||
546 | "license", | ||
547 | license_list_version, | ||
548 | re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression), | ||
549 | ] | ||
550 | |||
551 | license_text = [ | ||
552 | (k, license_text_map[k]) for k in sorted(license_text_map.keys()) | ||
553 | ] | ||
554 | |||
555 | if not license_text: | ||
556 | lic = self.find_filter( | ||
557 | oe.spdx30.simplelicensing_LicenseExpression, | ||
558 | simplelicensing_licenseExpression=license_expression, | ||
559 | simplelicensing_licenseListVersion=license_list_version, | ||
560 | ) | ||
561 | if lic is not None: | ||
562 | return lic | ||
563 | else: | ||
564 | spdxid.append(spdxid_hash(*(v for _, v in license_text))) | ||
565 | lic = self.find_by_id(self.new_spdxid(*spdxid)) | ||
566 | if lic is not None: | ||
567 | return lic | ||
568 | |||
569 | lic = self.add( | ||
570 | oe.spdx30.simplelicensing_LicenseExpression( | ||
571 | _id=self.new_spdxid(*spdxid), | ||
572 | creationInfo=self.doc.creationInfo, | ||
573 | simplelicensing_licenseExpression=license_expression, | ||
574 | simplelicensing_licenseListVersion=license_list_version, | ||
575 | ) | ||
576 | ) | ||
577 | |||
578 | for key, value in license_text: | ||
579 | lic.simplelicensing_customIdToUri.append( | ||
580 | oe.spdx30.DictionaryEntry(key=key, value=value) | ||
581 | ) | ||
582 | |||
583 | return lic | ||
584 | |||
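# Worked example of the version fix-up above (values assumed): an older
# license list version "3.17" becomes "3.17.0" so it parses as semver, while
# "3.21.0" is left untouched.
#
#   v = "3.17"
#   if v.count(".") < 2:
#       v += ".0"
#   assert v == "3.17.0"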
585 | def scan_declared_licenses(self, spdx_file, filepath, license_data): | ||
586 | for e in spdx_file.extension: | ||
587 | if isinstance(e, OELicenseScannedExtension): | ||
588 | return | ||
589 | |||
590 | file_licenses = set() | ||
591 | for extracted_lic in oe.spdx_common.extract_licenses(filepath): | ||
592 | lic = self.new_license_expression(extracted_lic, license_data) | ||
593 | self.set_element_alias(lic) | ||
594 | file_licenses.add(lic) | ||
595 | |||
596 | self.new_relationship( | ||
597 | [spdx_file], | ||
598 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
599 | [oe.sbom30.get_element_link_id(lic_alias) for lic_alias in file_licenses], | ||
600 | ) | ||
601 | spdx_file.extension.append(OELicenseScannedExtension()) | ||
602 | |||
603 | def new_file(self, _id, name, path, *, purposes=[]): | ||
604 | sha256_hash = bb.utils.sha256_file(path) | ||
605 | |||
606 | for f in self.by_sha256_hash.get(sha256_hash, []): | ||
607 | if not isinstance(f, oe.spdx30.software_File): | ||
608 | continue | ||
609 | |||
610 | if purposes: | ||
611 | new_primary = purposes[0] | ||
612 | new_additional = [] | ||
613 | |||
614 | if f.software_primaryPurpose: | ||
615 | new_additional.append(f.software_primaryPurpose) | ||
616 | new_additional.extend(f.software_additionalPurpose) | ||
617 | |||
618 | new_additional = sorted( | ||
619 | list(set(p for p in new_additional if p != new_primary)) | ||
620 | ) | ||
621 | |||
622 | f.software_primaryPurpose = new_primary | ||
623 | f.software_additionalPurpose = new_additional | ||
624 | |||
625 | if f.name != name: | ||
626 | for e in f.extension: | ||
627 | if isinstance(e, OEFileNameAliasExtension): | ||
628 | e.aliases.append(name) | ||
629 | break | ||
630 | else: | ||
631 | f.extension.append(OEFileNameAliasExtension(aliases=[name])) | ||
632 | |||
633 | return f | ||
634 | |||
635 | spdx_file = oe.spdx30.software_File( | ||
636 | _id=_id, | ||
637 | creationInfo=self.doc.creationInfo, | ||
638 | name=name, | ||
639 | ) | ||
640 | if purposes: | ||
641 | spdx_file.software_primaryPurpose = purposes[0] | ||
642 | spdx_file.software_additionalPurpose = purposes[1:] | ||
643 | |||
644 | spdx_file.verifiedUsing.append( | ||
645 | oe.spdx30.Hash( | ||
646 | algorithm=oe.spdx30.HashAlgorithm.sha256, | ||
647 | hashValue=sha256_hash, | ||
648 | ) | ||
649 | ) | ||
650 | |||
651 | return self.add(spdx_file) | ||
652 | |||
653 | def new_cve_vuln(self, cve): | ||
654 | v = oe.spdx30.security_Vulnerability() | ||
655 | v._id = self.new_spdxid("vulnerability", cve) | ||
656 | v.creationInfo = self.doc.creationInfo | ||
657 | |||
658 | v.externalIdentifier.append( | ||
659 | oe.spdx30.ExternalIdentifier( | ||
660 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve, | ||
661 | identifier=cve, | ||
662 | identifierLocator=[ | ||
663 | f"https://cveawg.mitre.org/api/cve/{cve}", | ||
664 | f"https://www.cve.org/CVERecord?id={cve}", | ||
665 | ], | ||
666 | ) | ||
667 | ) | ||
668 | return self.add(v) | ||
669 | |||
670 | def new_vex_patched_relationship(self, from_, to): | ||
671 | return self._new_relationship( | ||
672 | oe.spdx30.security_VexFixedVulnAssessmentRelationship, | ||
673 | from_, | ||
674 | oe.spdx30.RelationshipType.fixedIn, | ||
675 | to, | ||
676 | spdxid_name="vex-fixed", | ||
677 | security_vexVersion=VEX_VERSION, | ||
678 | ) | ||
679 | |||
680 | def new_vex_unpatched_relationship(self, from_, to): | ||
681 | return self._new_relationship( | ||
682 | oe.spdx30.security_VexAffectedVulnAssessmentRelationship, | ||
683 | from_, | ||
684 | oe.spdx30.RelationshipType.affects, | ||
685 | to, | ||
686 | spdxid_name="vex-affected", | ||
687 | security_vexVersion=VEX_VERSION, | ||
688 | security_actionStatement="Mitigation action unknown", | ||
689 | ) | ||
690 | |||
691 | def new_vex_ignored_relationship(self, from_, to, *, impact_statement): | ||
692 | return self._new_relationship( | ||
693 | oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship, | ||
694 | from_, | ||
695 | oe.spdx30.RelationshipType.doesNotAffect, | ||
696 | to, | ||
697 | spdxid_name="vex-not-affected", | ||
698 | security_vexVersion=VEX_VERSION, | ||
699 | security_impactStatement=impact_statement, | ||
700 | ) | ||
701 | |||
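# Summary of the three VEX helpers above: each CVE status maps to a distinct
# SPDX 3 vulnerability-assessment relationship:
#
#   patched   -> security_VexFixedVulnAssessmentRelationship    (fixedIn)
#   unpatched -> security_VexAffectedVulnAssessmentRelationship (affects)
#   ignored   -> security_VexNotAffectedVulnAssessmentRelationship
#                (doesNotAffect, carrying the impact statement)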
702 | def import_bitbake_build_objset(self): | ||
703 | deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX")) | ||
704 | bb_objset = load_jsonld( | ||
705 | self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True | ||
706 | ) | ||
707 | self.doc.import_.extend(bb_objset.doc.import_) | ||
708 | self.update(bb_objset.objects) | ||
709 | |||
710 | return bb_objset | ||
711 | |||
712 | def import_bitbake_build(self): | ||
713 | def find_bitbake_build(objset): | ||
714 | return objset.find_filter( | ||
715 | oe.spdx30.build_Build, | ||
716 | build_buildType=SPDX_BUILD_TYPE, | ||
717 | ) | ||
718 | |||
719 | build = find_bitbake_build(self) | ||
720 | if build: | ||
721 | return build | ||
722 | |||
723 | bb_objset = self.import_bitbake_build_objset() | ||
724 | build = find_bitbake_build(bb_objset) | ||
725 | if build is None: | ||
726 | bb.fatal(f"No build found in {deploy_dir_spdx}") | ||
727 | |||
728 | return build | ||
729 | |||
730 | def new_task_build(self, name, typ): | ||
731 | current_task = self.d.getVar("BB_CURRENTTASK") | ||
732 | pn = self.d.getVar("PN") | ||
733 | |||
734 | build = self.add( | ||
735 | oe.spdx30.build_Build( | ||
736 | _id=self.new_spdxid("build", name), | ||
737 | creationInfo=self.doc.creationInfo, | ||
738 | name=f"{pn}:do_{current_task}:{name}", | ||
739 | build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}", | ||
740 | ) | ||
741 | ) | ||
742 | |||
743 | if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1": | ||
744 | bitbake_build = self.import_bitbake_build() | ||
745 | |||
746 | self.new_relationship( | ||
747 | [bitbake_build], | ||
748 | oe.spdx30.RelationshipType.ancestorOf, | ||
749 | [build], | ||
750 | ) | ||
751 | |||
752 | if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1": | ||
753 | for varname in sorted(self.d.keys()): | ||
754 | if varname.startswith("__"): | ||
755 | continue | ||
756 | |||
757 | value = self.d.getVar(varname, expand=False) | ||
758 | |||
759 | # TODO: Deal with non-string values | ||
760 | if not isinstance(value, str): | ||
761 | continue | ||
762 | |||
763 | build.build_parameter.append( | ||
764 | oe.spdx30.DictionaryEntry(key=varname, value=value) | ||
765 | ) | ||
766 | |||
767 | return build | ||
768 | |||
769 | def new_archive(self, archive_name): | ||
770 | return self.add( | ||
771 | oe.spdx30.software_File( | ||
772 | _id=self.new_spdxid("archive", str(archive_name)), | ||
773 | creationInfo=self.doc.creationInfo, | ||
774 | name=str(archive_name), | ||
775 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
776 | ) | ||
777 | ) | ||
778 | |||
779 | @classmethod | ||
780 | def new_objset(cls, d, name, copy_from_bitbake_doc=True): | ||
781 | objset = cls(d) | ||
782 | |||
783 | document = oe.spdx30.SpdxDocument( | ||
784 | _id=objset.new_spdxid("document", name), | ||
785 | name=name, | ||
786 | ) | ||
787 | |||
788 | document.extension.append( | ||
789 | OEIdAliasExtension( | ||
790 | alias=objset.new_alias_id( | ||
791 | document, | ||
792 | OE_DOC_ALIAS_PREFIX + d.getVar("PN") + "/" + name + "/", | ||
793 | ), | ||
794 | ) | ||
795 | ) | ||
796 | objset.doc = document | ||
797 | objset.add_index(document) | ||
798 | |||
799 | if copy_from_bitbake_doc: | ||
800 | bb_objset = objset.import_bitbake_build_objset() | ||
801 | document.creationInfo = objset.copy_creation_info( | ||
802 | bb_objset.doc.creationInfo | ||
803 | ) | ||
804 | else: | ||
805 | document.creationInfo = objset.new_creation_info() | ||
806 | |||
807 | return objset | ||
808 | |||
809 | def expand_collection(self, *, add_objectsets=[]): | ||
810 | """ | ||
811 | Expands a collection to pull in all missing elements | ||
812 | |||
813 | Returns the set of SPDX IDs that could not be found and therefore could not be linked into the document | ||
814 | """ | ||
815 | missing_spdxids = set() | ||
816 | imports = {e.externalSpdxId: e for e in self.doc.import_} | ||
817 | |||
818 | def merge_doc(other): | ||
819 | nonlocal imports | ||
820 | |||
821 | for e in other.doc.import_: | ||
822 | if e.externalSpdxId not in imports: | ||
823 | imports[e.externalSpdxId] = e | ||
824 | |||
825 | self.objects |= other.objects | ||
826 | |||
827 | for o in add_objectsets: | ||
828 | merge_doc(o) | ||
829 | |||
830 | needed_spdxids = self.link() | ||
831 | provided_spdxids = set(self.obj_by_id.keys()) | ||
832 | |||
833 | while True: | ||
834 | import_spdxids = set(imports.keys()) | ||
835 | searching_spdxids = ( | ||
836 | needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids | ||
837 | ) | ||
838 | if not searching_spdxids: | ||
839 | break | ||
840 | |||
841 | spdxid = searching_spdxids.pop() | ||
842 | bb.debug( | ||
843 | 1, | ||
844 | f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}", | ||
845 | ) | ||
846 | dep_objset, dep_path = find_by_spdxid(self.d, spdxid) | ||
847 | |||
848 | if dep_objset: | ||
849 | dep_provided = set(dep_objset.obj_by_id.keys()) | ||
850 | if spdxid not in dep_provided: | ||
851 | bb.fatal(f"{spdxid} not found in {dep_path}") | ||
852 | provided_spdxids |= dep_provided | ||
853 | needed_spdxids |= dep_objset.missing_ids | ||
854 | merge_doc(dep_objset) | ||
855 | else: | ||
856 | missing_spdxids.add(spdxid) | ||
857 | |||
858 | self.doc.import_ = sorted(imports.values(), key=lambda e: e.externalSpdxId) | ||
859 | bb.debug(1, "Linking...") | ||
860 | self.link() | ||
861 | |||
862 | # Manually go through all of the simplelicensing_customIdToUri DictionaryEntry | ||
863 | # items and resolve any aliases to actual objects. | ||
864 | for lic in self.foreach_type(oe.spdx30.simplelicensing_LicenseExpression): | ||
865 | for entry in lic.simplelicensing_customIdToUri: | ||
866 | if entry.value.startswith(OE_ALIAS_PREFIX): | ||
867 | obj = self.find_by_id(entry.value) | ||
868 | if obj is not None: | ||
869 | entry.value = obj._id | ||
870 | else: | ||
871 | self.missing_ids.add(entry.value) | ||
872 | |||
873 | self.missing_ids -= set(imports.keys()) | ||
874 | return self.missing_ids | ||
875 | |||
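# Usage sketch (hypothetical objset/dep_objsets names): expand the current
# document with the object sets of its dependencies and report anything
# unresolved.
#
#   missing = objset.expand_collection(add_objectsets=dep_objsets)
#   if missing:
#       bb.warn("Unresolved SPDX IDs:\n  " + "\n  ".join(sorted(missing)))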
876 | |||
877 | def load_jsonld(d, path, required=False): | ||
878 | deserializer = oe.spdx30.JSONLDDeserializer() | ||
879 | objset = ObjectSet(d) | ||
880 | try: | ||
881 | with path.open("rb") as f: | ||
882 | deserializer.read(f, objset) | ||
883 | except FileNotFoundError: | ||
884 | if required: | ||
885 | bb.fatal("No SPDX document named %s found" % path) | ||
886 | return None | ||
887 | |||
888 | if not objset.doc: | ||
889 | bb.fatal("SPDX Document %s has no SPDXDocument element" % path) | ||
890 | return None | ||
891 | |||
892 | objset.objects.remove(objset.doc) | ||
893 | return objset | ||
894 | |||
895 | |||
896 | def jsonld_arch_path(d, arch, subdir, name, deploydir=None): | ||
897 | if deploydir is None: | ||
898 | deploydir = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
899 | return deploydir / arch / subdir / (name + ".spdx.json") | ||
900 | |||
901 | |||
902 | def jsonld_hash_path(h): | ||
903 | return Path("by-spdxid-hash") / h[:2], h | ||
904 | |||
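# Sketch: spdxid hashes are sharded into two-character subdirectories by
# hash prefix, e.g.
#
#   jsonld_hash_path("deadbeefcafe")
#   # -> (PosixPath('by-spdxid-hash/de'), 'deadbeefcafe')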
905 | |||
906 | def load_jsonld_by_arch(d, arch, subdir, name, *, required=False): | ||
907 | path = jsonld_arch_path(d, arch, subdir, name) | ||
908 | objset = load_jsonld(d, path, required=required) | ||
909 | if objset is not None: | ||
910 | return (objset, path) | ||
911 | return (None, None) | ||
912 | |||
913 | |||
914 | def find_jsonld(d, subdir, name, *, required=False): | ||
915 | package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split() | ||
916 | package_archs.reverse() | ||
917 | |||
918 | for arch in package_archs: | ||
919 | objset, path = load_jsonld_by_arch(d, arch, subdir, name) | ||
920 | if objset is not None: | ||
921 | return (objset, path) | ||
922 | |||
923 | if required: | ||
924 | bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name)) | ||
925 | |||
926 | return (None, None) | ||
927 | |||
928 | |||
929 | def write_jsonld_doc(d, objset, dest): | ||
930 | if not isinstance(objset, ObjectSet): | ||
931 | bb.fatal("Only an ObjsetSet can be serialized") | ||
932 | return | ||
933 | |||
934 | if not objset.doc: | ||
935 | bb.fatal("ObjectSet is missing a SpdxDocument") | ||
936 | return | ||
937 | |||
938 | objset.doc.rootElement = sorted(list(set(objset.doc.rootElement))) | ||
939 | objset.doc.profileConformance = sorted( | ||
940 | list( | ||
941 | getattr(oe.spdx30.ProfileIdentifierType, p) | ||
942 | for p in d.getVar("SPDX_PROFILES").split() | ||
943 | ) | ||
944 | ) | ||
945 | |||
946 | dest.parent.mkdir(exist_ok=True, parents=True) | ||
947 | |||
948 | if d.getVar("SPDX_PRETTY") == "1": | ||
949 | serializer = oe.spdx30.JSONLDSerializer( | ||
950 | indent=2, | ||
951 | ) | ||
952 | else: | ||
953 | serializer = oe.spdx30.JSONLDInlineSerializer() | ||
954 | |||
955 | objset.objects.add(objset.doc) | ||
956 | with dest.open("wb") as f: | ||
957 | serializer.write(objset, f, force_at_graph=True) | ||
958 | objset.objects.remove(objset.doc) | ||
959 | |||
960 | |||
961 | def write_recipe_jsonld_doc( | ||
962 | d, | ||
963 | objset, | ||
964 | subdir, | ||
965 | deploydir, | ||
966 | *, | ||
967 | create_spdx_id_links=True, | ||
968 | ): | ||
969 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
970 | |||
971 | dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir) | ||
972 | |||
973 | def link_id(_id): | ||
974 | hash_path = jsonld_hash_path(hash_id(_id)) | ||
975 | |||
976 | link_name = jsonld_arch_path( | ||
977 | d, | ||
978 | pkg_arch, | ||
979 | *hash_path, | ||
980 | deploydir=deploydir, | ||
981 | ) | ||
982 | try: | ||
983 | link_name.parent.mkdir(exist_ok=True, parents=True) | ||
984 | link_name.symlink_to(os.path.relpath(dest, link_name.parent)) | ||
985 | except FileExistsError: | ||
986 | target = link_name.readlink() | ||
987 | bb.warn( | ||
988 | f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}" | ||
989 | ) | ||
990 | raise | ||
991 | |||
992 | return hash_path[-1] | ||
993 | |||
994 | objset.add_aliases() | ||
995 | |||
996 | try: | ||
997 | if create_spdx_id_links: | ||
998 | alias_ext = get_alias(objset.doc) | ||
999 | if alias_ext is not None and alias_ext.alias: | ||
1000 | alias_ext.link_name = link_id(alias_ext.alias) | ||
1001 | |||
1002 | finally: | ||
1003 | # It is really helpful for debugging if the JSON document is written | ||
1004 | # out, so always do that even if there is an error making the links | ||
1005 | write_jsonld_doc(d, objset, dest) | ||
1006 | |||
1007 | |||
1008 | def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter): | ||
1009 | objset, fn = find_jsonld(d, subdir, fn_name, required=True) | ||
1010 | |||
1011 | spdx_obj = objset.find_root(obj_type, **attr_filter) | ||
1012 | if not spdx_obj: | ||
1013 | bb.fatal("No root %s found in %s" % (obj_type.__name__, fn)) | ||
1014 | |||
1015 | return spdx_obj, objset | ||
1016 | |||
1017 | |||
1018 | def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter): | ||
1019 | objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True) | ||
1020 | |||
1021 | spdx_obj = objset.find_filter(obj_type, **attr_filter) | ||
1022 | if not spdx_obj: | ||
1023 | bb.fatal("No %s found in %s" % (obj_type.__name__, fn)) | ||
1024 | |||
1025 | return spdx_obj, objset | ||
1026 | |||
1027 | |||
1028 | def find_by_spdxid(d, spdxid, *, required=False): | ||
1029 | if spdxid.startswith(OE_ALIAS_PREFIX): | ||
1030 | h = spdxid[len(OE_ALIAS_PREFIX) :].split("/", 1)[0] | ||
1031 | return find_jsonld(d, *jsonld_hash_path(h), required=required) | ||
1032 | return find_jsonld(d, *jsonld_hash_path(hash_id(spdxid)), required=required) | ||
1033 | |||
1034 | |||
1035 | def create_sbom(d, name, root_elements, add_objectsets=[]): | ||
1036 | objset = ObjectSet.new_objset(d, name) | ||
1037 | |||
1038 | sbom = objset.add( | ||
1039 | oe.spdx30.software_Sbom( | ||
1040 | _id=objset.new_spdxid("sbom", name), | ||
1041 | name=name, | ||
1042 | creationInfo=objset.doc.creationInfo, | ||
1043 | software_sbomType=[oe.spdx30.software_SbomType.build], | ||
1044 | rootElement=root_elements, | ||
1045 | ) | ||
1046 | ) | ||
1047 | |||
1048 | missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets) | ||
1049 | if missing_spdxids: | ||
1050 | bb.warn( | ||
1051 | "The following SPDX IDs were unable to be resolved:\n " | ||
1052 | + "\n ".join(sorted(list(missing_spdxids))) | ||
1053 | ) | ||
1054 | |||
1055 | # Filter out internal extensions from final SBoMs | ||
1056 | objset.remove_internal_extensions() | ||
1057 | |||
1058 | # SBoM should be the only root element of the document | ||
1059 | objset.doc.rootElement = [sbom] | ||
1060 | |||
1061 | # De-duplicate licenses | ||
1062 | unique = set() | ||
1063 | dedup = {} | ||
1064 | for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression): | ||
1065 | for u in unique: | ||
1066 | if ( | ||
1067 | u.simplelicensing_licenseExpression | ||
1068 | == lic.simplelicensing_licenseExpression | ||
1069 | and u.simplelicensing_licenseListVersion | ||
1070 | == lic.simplelicensing_licenseListVersion | ||
1071 | ): | ||
1072 | dedup[lic] = u | ||
1073 | break | ||
1074 | else: | ||
1075 | unique.add(lic) | ||
1076 | |||
1077 | if dedup: | ||
1078 | for rel in objset.foreach_filter( | ||
1079 | oe.spdx30.Relationship, | ||
1080 | relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
1081 | ): | ||
1082 | rel.to = [dedup.get(to, to) for to in rel.to] | ||
1083 | |||
1084 | for rel in objset.foreach_filter( | ||
1085 | oe.spdx30.Relationship, | ||
1086 | relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense, | ||
1087 | ): | ||
1088 | rel.to = [dedup.get(to, to) for to in rel.to] | ||
1089 | |||
1090 | for k, v in dedup.items(): | ||
1091 | bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}") | ||
1092 | objset.objects.remove(k) | ||
1093 | |||
1094 | objset.create_index() | ||
1095 | |||
1096 | return objset, sbom | ||
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py index 3dc3672210..9fe0fbb752 100644 --- a/meta/lib/oe/sdk.py +++ b/meta/lib/oe/sdk.py | |||
@@ -148,13 +148,11 @@ def get_extra_sdkinfo(sstate_dir): | |||
148 | extra_info['filesizes'] = {} | 148 | extra_info['filesizes'] = {} |
149 | for root, _, files in os.walk(sstate_dir): | 149 | for root, _, files in os.walk(sstate_dir): |
150 | for fn in files: | 150 | for fn in files: |
151 | if fn.endswith('.tgz'): | 151 | # Note that this makes an assumption about the sstate filenames |
152 | if '.tar.' in fn and not fn.endswith('.siginfo'): | ||
152 | fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) | 153 | fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) |
153 | task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] | 154 | task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] |
154 | origtotal = extra_info['tasksizes'].get(task, 0) | 155 | origtotal = extra_info['tasksizes'].get(task, 0) |
155 | extra_info['tasksizes'][task] = origtotal + fsize | 156 | extra_info['tasksizes'][task] = origtotal + fsize |
156 | extra_info['filesizes'][fn] = fsize | 157 | extra_info['filesizes'][fn] = fsize |
157 | return extra_info | 158 | return extra_info |
158 | |||
159 | if __name__ == "__main__": | ||
160 | pass | ||
diff --git a/meta/lib/oe/spdx30.py b/meta/lib/oe/spdx30.py new file mode 100644 index 0000000000..cd97eebd18 --- /dev/null +++ b/meta/lib/oe/spdx30.py | |||
@@ -0,0 +1,5593 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Generated Python bindings from a SHACL model | ||
4 | # | ||
5 | # This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT | ||
6 | # | ||
7 | # SPDX-License-Identifier: MIT | ||
8 | |||
9 | import functools | ||
10 | import hashlib | ||
11 | import json | ||
12 | import re | ||
13 | import sys | ||
14 | import threading | ||
15 | import time | ||
16 | from contextlib import contextmanager | ||
17 | from datetime import datetime, timezone, timedelta | ||
18 | from enum import Enum | ||
19 | from abc import ABC, abstractmethod | ||
20 | |||
21 | |||
22 | def check_type(obj, types): | ||
23 | if not isinstance(obj, types): | ||
24 | if isinstance(types, (list, tuple)): | ||
25 | raise TypeError( | ||
26 | f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}" | ||
27 | ) | ||
28 | raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}") | ||
29 | |||
30 | |||
31 | class Property(ABC): | ||
32 | """ | ||
33 | A generic SHACL object property. The different types will derive from this | ||
34 | class | ||
35 | """ | ||
36 | |||
37 | def __init__(self, *, pattern=None): | ||
38 | self.pattern = pattern | ||
39 | |||
40 | def init(self): | ||
41 | return None | ||
42 | |||
43 | def validate(self, value): | ||
44 | check_type(value, self.VALID_TYPES) | ||
45 | if self.pattern is not None and not re.search( | ||
46 | self.pattern, self.to_string(value) | ||
47 | ): | ||
48 | raise ValueError( | ||
49 | f"Value is not correctly formatted. Got '{self.to_string(value)}'" | ||
50 | ) | ||
51 | |||
52 | def set(self, value): | ||
53 | return value | ||
54 | |||
55 | def check_min_count(self, value, min_count): | ||
56 | return min_count == 1 | ||
57 | |||
58 | def check_max_count(self, value, max_count): | ||
59 | return max_count == 1 | ||
60 | |||
61 | def elide(self, value): | ||
62 | return value is None | ||
63 | |||
64 | def walk(self, value, callback, path): | ||
65 | callback(value, path) | ||
66 | |||
67 | def iter_objects(self, value, recursive, visited): | ||
68 | return [] | ||
69 | |||
70 | def link_prop(self, value, objectset, missing, visited): | ||
71 | return value | ||
72 | |||
73 | def to_string(self, value): | ||
74 | return str(value) | ||
75 | |||
76 | @abstractmethod | ||
77 | def encode(self, encoder, value, state): | ||
78 | pass | ||
79 | |||
80 | @abstractmethod | ||
81 | def decode(self, decoder, *, objectset=None): | ||
82 | pass | ||
83 | |||
84 | |||
85 | class StringProp(Property): | ||
86 | """ | ||
87 | A scalar string property for a SHACL object | ||
88 | """ | ||
89 | |||
90 | VALID_TYPES = str | ||
91 | |||
92 | def set(self, value): | ||
93 | return str(value) | ||
94 | |||
95 | def encode(self, encoder, value, state): | ||
96 | encoder.write_string(value) | ||
97 | |||
98 | def decode(self, decoder, *, objectset=None): | ||
99 | return decoder.read_string() | ||
100 | |||
101 | |||
102 | class AnyURIProp(StringProp): | ||
103 | def encode(self, encoder, value, state): | ||
104 | encoder.write_iri(value) | ||
105 | |||
106 | def decode(self, decoder, *, objectset=None): | ||
107 | return decoder.read_iri() | ||
108 | |||
109 | |||
110 | class DateTimeProp(Property): | ||
111 | """ | ||
112 | A Date/Time object with an optional timezone | ||
113 | """ | ||
114 | |||
115 | VALID_TYPES = datetime | ||
116 | UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ" | ||
117 | REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$" | ||
118 | |||
119 | def set(self, value): | ||
120 | return self._normalize(value) | ||
121 | |||
122 | def encode(self, encoder, value, state): | ||
123 | encoder.write_datetime(self.to_string(value)) | ||
124 | |||
125 | def decode(self, decoder, *, objectset=None): | ||
126 | s = decoder.read_datetime() | ||
127 | if s is None: | ||
128 | return None | ||
129 | v = self.from_string(s) | ||
130 | return self._normalize(v) | ||
131 | |||
132 | def _normalize(self, value): | ||
133 | if value.utcoffset() is None: | ||
134 | value = value.astimezone() | ||
135 | offset = value.utcoffset() | ||
136 | seconds = offset % timedelta(minutes=-1 if offset.total_seconds() < 0 else 1) | ||
137 | if seconds: | ||
138 | offset = offset - seconds | ||
139 | value = value.replace(tzinfo=timezone(offset)) | ||
140 | value = value.replace(microsecond=0) | ||
141 | return value | ||
142 | |||
143 | def to_string(self, value): | ||
144 | value = self._normalize(value) | ||
145 | if value.tzinfo == timezone.utc: | ||
146 | return value.strftime(self.UTC_FORMAT_STR) | ||
147 | return value.isoformat() | ||
148 | |||
149 | def from_string(self, value): | ||
150 | if not re.match(self.REGEX, value): | ||
151 | raise ValueError(f"'{value}' is not a correctly formatted datetime") | ||
152 | if "Z" in value: | ||
153 | d = datetime( | ||
154 | *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]), | ||
155 | tzinfo=timezone.utc, | ||
156 | ) | ||
157 | else: | ||
158 | d = datetime.fromisoformat(value) | ||
159 | |||
160 | return self._normalize(d) | ||
161 | |||
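# Sketch of the normalization above (example values): microseconds are
# discarded and sub-minute UTC offsets are rounded toward zero, so
#
#   from datetime import datetime, timezone, timedelta
#   raw = datetime(2024, 1, 2, 3, 4, 5, 678901,
#                  tzinfo=timezone(timedelta(hours=1, seconds=30)))
#   # DateTimeProp().set(raw).isoformat() -> '2024-01-02T03:04:05+01:00'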
162 | |||
163 | class DateTimeStampProp(DateTimeProp): | ||
164 | """ | ||
165 | A Date/Time object with a required timezone offset | ||
166 | """ | ||
167 | |||
168 | REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$" | ||
169 | |||
170 | |||
171 | class IntegerProp(Property): | ||
172 | VALID_TYPES = int | ||
173 | |||
174 | def set(self, value): | ||
175 | return int(value) | ||
176 | |||
177 | def encode(self, encoder, value, state): | ||
178 | encoder.write_integer(value) | ||
179 | |||
180 | def decode(self, decoder, *, objectset=None): | ||
181 | return decoder.read_integer() | ||
182 | |||
183 | |||
184 | class PositiveIntegerProp(IntegerProp): | ||
185 | def validate(self, value): | ||
186 | super().validate(value) | ||
187 | if value < 1: | ||
188 | raise ValueError(f"Value must be >=1. Got {value}") | ||
189 | |||
190 | |||
191 | class NonNegativeIntegerProp(IntegerProp): | ||
192 | def validate(self, value): | ||
193 | super().validate(value) | ||
194 | if value < 0: | ||
195 | raise ValueError(f"Value must be >= 0. Got {value}") | ||
196 | |||
197 | |||
198 | class BooleanProp(Property): | ||
199 | VALID_TYPES = bool | ||
200 | |||
201 | def set(self, value): | ||
202 | return bool(value) | ||
203 | |||
204 | def encode(self, encoder, value, state): | ||
205 | encoder.write_bool(value) | ||
206 | |||
207 | def decode(self, decoder, *, objectset=None): | ||
208 | return decoder.read_bool() | ||
209 | |||
210 | |||
211 | class FloatProp(Property): | ||
212 | VALID_TYPES = (float, int) | ||
213 | |||
214 | def set(self, value): | ||
215 | return float(value) | ||
216 | |||
217 | def encode(self, encoder, value, state): | ||
218 | encoder.write_float(value) | ||
219 | |||
220 | def decode(self, decoder, *, objectset=None): | ||
221 | return decoder.read_float() | ||
222 | |||
223 | |||
224 | class IRIProp(Property): | ||
225 | def __init__(self, context=[], *, pattern=None): | ||
226 | super().__init__(pattern=pattern) | ||
227 | self.context = context | ||
228 | |||
229 | def compact(self, value): | ||
230 | for iri, compact in self.context: | ||
231 | if value == iri: | ||
232 | return compact | ||
233 | return None | ||
234 | |||
235 | def expand(self, value): | ||
236 | for iri, compact in self.context: | ||
237 | if value == compact: | ||
238 | return iri | ||
239 | return None | ||
240 | |||
241 | def iri_values(self): | ||
242 | return (iri for iri, _ in self.context) | ||
243 | |||
244 | |||
245 | class ObjectProp(IRIProp): | ||
246 | """ | ||
247 | A scalar object property referencing another SHACL object | ||
248 | """ | ||
249 | |||
250 | def __init__(self, cls, required, context=[]): | ||
251 | super().__init__(context) | ||
252 | self.cls = cls | ||
253 | self.required = required | ||
254 | |||
255 | def init(self): | ||
256 | if self.required and not self.cls.IS_ABSTRACT: | ||
257 | return self.cls() | ||
258 | return None | ||
259 | |||
260 | def validate(self, value): | ||
261 | check_type(value, (self.cls, str)) | ||
262 | |||
263 | def walk(self, value, callback, path): | ||
264 | if value is None: | ||
265 | return | ||
266 | |||
267 | if not isinstance(value, str): | ||
268 | value.walk(callback, path) | ||
269 | else: | ||
270 | callback(value, path) | ||
271 | |||
272 | def iter_objects(self, value, recursive, visited): | ||
273 | if value is None or isinstance(value, str): | ||
274 | return | ||
275 | |||
276 | if value not in visited: | ||
277 | visited.add(value) | ||
278 | yield value | ||
279 | |||
280 | if recursive: | ||
281 | for c in value.iter_objects(recursive=True, visited=visited): | ||
282 | yield c | ||
283 | |||
284 | def encode(self, encoder, value, state): | ||
285 | if value is None: | ||
286 | raise ValueError("Object cannot be None") | ||
287 | |||
288 | if isinstance(value, str): | ||
289 | encoder.write_iri(value, self.compact(value)) | ||
290 | return | ||
291 | |||
292 | return value.encode(encoder, state) | ||
293 | |||
294 | def decode(self, decoder, *, objectset=None): | ||
295 | iri = decoder.read_iri() | ||
296 | if iri is None: | ||
297 | return self.cls.decode(decoder, objectset=objectset) | ||
298 | |||
299 | iri = self.expand(iri) or iri | ||
300 | |||
301 | if objectset is None: | ||
302 | return iri | ||
303 | |||
304 | obj = objectset.find_by_id(iri) | ||
305 | if obj is None: | ||
306 | return iri | ||
307 | |||
308 | self.validate(obj) | ||
309 | return obj | ||
310 | |||
311 | def link_prop(self, value, objectset, missing, visited): | ||
312 | if value is None: | ||
313 | return value | ||
314 | |||
315 | if isinstance(value, str): | ||
316 | o = objectset.find_by_id(value) | ||
317 | if o is not None: | ||
318 | self.validate(o) | ||
319 | return o | ||
320 | |||
321 | if missing is not None: | ||
322 | missing.add(value) | ||
323 | |||
324 | return value | ||
325 | |||
326 | # De-duplicate IDs | ||
327 | if value._id: | ||
328 | value = objectset.find_by_id(value._id, value) | ||
329 | self.validate(value) | ||
330 | |||
331 | value.link_helper(objectset, missing, visited) | ||
332 | return value | ||
333 | |||
334 | |||
335 | class ListProxy(object): | ||
336 | def __init__(self, prop, data=None): | ||
337 | if data is None: | ||
338 | self.__data = [] | ||
339 | else: | ||
340 | self.__data = data | ||
341 | self.__prop = prop | ||
342 | |||
343 | def append(self, value): | ||
344 | self.__prop.validate(value) | ||
345 | self.__data.append(self.__prop.set(value)) | ||
346 | |||
347 | def insert(self, idx, value): | ||
348 | self.__prop.validate(value) | ||
349 | self.__data.insert(idx, self.__prop.set(value)) | ||
350 | |||
351 | def extend(self, items): | ||
352 | for i in items: | ||
353 | self.append(i) | ||
354 | |||
355 | def sort(self, *args, **kwargs): | ||
356 | self.__data.sort(*args, **kwargs) | ||
357 | |||
358 | def __getitem__(self, key): | ||
359 | return self.__data[key] | ||
360 | |||
361 | def __setitem__(self, key, value): | ||
362 | if isinstance(key, slice): | ||
363 | for v in value: | ||
364 | self.__prop.validate(v) | ||
365 | self.__data[key] = [self.__prop.set(v) for v in value] | ||
366 | else: | ||
367 | self.__prop.validate(value) | ||
368 | self.__data[key] = self.__prop.set(value) | ||
369 | |||
370 | def __delitem__(self, key): | ||
371 | del self.__data[key] | ||
372 | |||
373 | def __contains__(self, item): | ||
374 | return item in self.__data | ||
375 | |||
376 | def __iter__(self): | ||
377 | return iter(self.__data) | ||
378 | |||
379 | def __len__(self): | ||
380 | return len(self.__data) | ||
381 | |||
382 | def __str__(self): | ||
383 | return str(self.__data) | ||
384 | |||
385 | def __repr__(self): | ||
386 | return repr(self.__data) | ||
387 | |||
388 | def __eq__(self, other): | ||
389 | if isinstance(other, ListProxy): | ||
390 | return self.__data == other.__data | ||
391 | |||
392 | return self.__data == other | ||
393 | |||
394 | |||
395 | class ListProp(Property): | ||
396 | """ | ||
397 | A list of SHACL properties | ||
398 | """ | ||
399 | |||
400 | VALID_TYPES = (list, ListProxy) | ||
401 | |||
402 | def __init__(self, prop): | ||
403 | super().__init__() | ||
404 | self.prop = prop | ||
405 | |||
406 | def init(self): | ||
407 | return ListProxy(self.prop) | ||
408 | |||
409 | def validate(self, value): | ||
410 | super().validate(value) | ||
411 | |||
412 | for i in value: | ||
413 | self.prop.validate(i) | ||
414 | |||
415 | def set(self, value): | ||
416 | if isinstance(value, ListProxy): | ||
417 | return value | ||
418 | |||
419 | return ListProxy(self.prop, [self.prop.set(d) for d in value]) | ||
420 | |||
421 | def check_min_count(self, value, min_count): | ||
422 | check_type(value, ListProxy) | ||
423 | return len(value) >= min_count | ||
424 | |||
425 | def check_max_count(self, value, max_count): | ||
426 | check_type(value, ListProxy) | ||
427 | return len(value) <= max_count | ||
428 | |||
429 | def elide(self, value): | ||
430 | check_type(value, ListProxy) | ||
431 | return len(value) == 0 | ||
432 | |||
433 | def walk(self, value, callback, path): | ||
434 | callback(value, path) | ||
435 | for idx, v in enumerate(value): | ||
436 | self.prop.walk(v, callback, path + [f"[{idx}]"]) | ||
437 | |||
438 | def iter_objects(self, value, recursive, visited): | ||
439 | for v in value: | ||
440 | for c in self.prop.iter_objects(v, recursive, visited): | ||
441 | yield c | ||
442 | |||
443 | def link_prop(self, value, objectset, missing, visited): | ||
444 | # value may be a plain list or a ListProxy; both iterate the same | ||
445 | # way, so a single comprehension covers either case | ||
446 | data = [self.prop.link_prop(v, objectset, missing, visited) for v in value] | ||
448 | |||
449 | return ListProxy(self.prop, data=data) | ||
450 | |||
451 | def encode(self, encoder, value, state): | ||
452 | check_type(value, ListProxy) | ||
453 | |||
454 | with encoder.write_list() as list_s: | ||
455 | for v in value: | ||
456 | with list_s.write_list_item() as item_s: | ||
457 | self.prop.encode(item_s, v, state) | ||
458 | |||
459 | def decode(self, decoder, *, objectset=None): | ||
460 | data = [] | ||
461 | for val_d in decoder.read_list(): | ||
462 | v = self.prop.decode(val_d, objectset=objectset) | ||
463 | self.prop.validate(v) | ||
464 | data.append(v) | ||
465 | |||
466 | return ListProxy(self.prop, data=data) | ||
467 | |||
468 | |||
469 | class EnumProp(IRIProp): | ||
470 | VALID_TYPES = str | ||
471 | |||
472 | def __init__(self, values, *, pattern=None): | ||
473 | super().__init__(values, pattern=pattern) | ||
474 | |||
475 | def validate(self, value): | ||
476 | super().validate(value) | ||
477 | |||
478 | valid_values = list(self.iri_values())  # a list, not a generator, so the error message below can reuse it | ||
479 | if value not in valid_values: | ||
480 | raise ValueError( | ||
481 | f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}" | ||
482 | ) | ||
483 | |||
484 | def encode(self, encoder, value, state): | ||
485 | encoder.write_enum(value, self, self.compact(value)) | ||
486 | |||
487 | def decode(self, decoder, *, objectset=None): | ||
488 | v = decoder.read_enum(self) | ||
489 | return self.expand(v) or v | ||
490 | |||
491 | |||
492 | class NodeKind(Enum): | ||
493 | BlankNode = 1 | ||
494 | IRI = 2 | ||
495 | BlankNodeOrIRI = 3 | ||
496 | |||
497 | |||
498 | def is_IRI(s): | ||
499 | if not isinstance(s, str): | ||
500 | return False | ||
501 | if s.startswith("_:"): | ||
502 | return False | ||
503 | if ":" not in s: | ||
504 | return False | ||
505 | return True | ||
506 | |||
507 | |||
508 | def is_blank_node(s): | ||
509 | if not isinstance(s, str): | ||
510 | return False | ||
511 | if not s.startswith("_:"): | ||
512 | return False | ||
513 | return True | ||
514 | |||
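# Sketch of the classification above:
#
#   is_IRI("http://example.com/x")  # True  (contains ':' and no '_:' prefix)
#   is_IRI("_:b0")                  # False (blank node)
#   is_IRI("plainstring")           # False (no ':')
#   is_blank_node("_:b0")           # True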
515 | |||
516 | def register(type_iri, *, compact_type=None, abstract=False): | ||
517 | def add_class(key, c): | ||
518 | assert ( | ||
519 | key not in SHACLObject.CLASSES | ||
520 | ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}" | ||
521 | SHACLObject.CLASSES[key] = c | ||
522 | |||
523 | def decorator(c): | ||
524 | global NAMED_INDIVIDUALS | ||
525 | |||
526 | assert issubclass( | ||
527 | c, SHACLObject | ||
528 | ), f"{c.__name__} is not derived from SHACLObject" | ||
529 | |||
530 | c._OBJ_TYPE = type_iri | ||
531 | c.IS_ABSTRACT = abstract | ||
532 | add_class(type_iri, c) | ||
533 | |||
534 | c._OBJ_COMPACT_TYPE = compact_type | ||
535 | if compact_type: | ||
536 | add_class(compact_type, c) | ||
537 | |||
538 | NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values()) | ||
539 | |||
540 | # Registration is deferred until the first instance of the class is | ||
541 | # created, so that it has access to all other defined classes | ||
542 | c._NEEDS_REG = True | ||
543 | return c | ||
544 | |||
545 | return decorator | ||
546 | |||
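# Usage sketch (hypothetical class and IRIs, not part of the generated
# model): classes register under their full type IRI and an optional
# compact alias.
#
#   @register("https://example.com/model/Thing", compact_type="Thing")
#   class Thing(SHACLObject):
#       NODE_KIND = NodeKind.IRI
#
#       @classmethod
#       def _register_props(cls):
#           super()._register_props()
#           cls._add_property("name", StringProp(), iri="https://example.com/model/name")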
547 | |||
548 | register_lock = threading.Lock() | ||
549 | NAMED_INDIVIDUALS = set() | ||
550 | |||
551 | |||
552 | @functools.total_ordering | ||
553 | class SHACLObject(object): | ||
554 | CLASSES = {} | ||
555 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
556 | ID_ALIAS = None | ||
557 | IS_ABSTRACT = True | ||
558 | |||
559 | def __init__(self, **kwargs): | ||
560 | if self._is_abstract(): | ||
561 | raise NotImplementedError( | ||
562 | f"{self.__class__.__name__} is abstract and cannot be implemented" | ||
563 | ) | ||
564 | |||
565 | with register_lock: | ||
566 | cls = self.__class__ | ||
567 | if cls._NEEDS_REG: | ||
568 | cls._OBJ_PROPERTIES = {} | ||
569 | cls._OBJ_IRIS = {} | ||
570 | cls._register_props() | ||
571 | cls._NEEDS_REG = False | ||
572 | |||
573 | self.__dict__["_obj_data"] = {} | ||
574 | self.__dict__["_obj_metadata"] = {} | ||
575 | |||
576 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
577 | self.__dict__["_obj_data"][iri] = prop.init() | ||
578 | |||
579 | for k, v in kwargs.items(): | ||
580 | setattr(self, k, v) | ||
581 | |||
582 | def _is_abstract(self): | ||
583 | return self.__class__.IS_ABSTRACT | ||
584 | |||
585 | @classmethod | ||
586 | def _register_props(cls): | ||
587 | cls._add_property("_id", StringProp(), iri="@id") | ||
588 | |||
589 | @classmethod | ||
590 | def _add_property( | ||
591 | cls, | ||
592 | pyname, | ||
593 | prop, | ||
594 | iri, | ||
595 | min_count=None, | ||
596 | max_count=None, | ||
597 | compact=None, | ||
598 | ): | ||
599 | if pyname in cls._OBJ_IRIS: | ||
600 | raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'") | ||
601 | if iri in cls._OBJ_PROPERTIES: | ||
602 | raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'") | ||
603 | |||
604 | while hasattr(cls, pyname): | ||
605 | pyname = pyname + "_" | ||
606 | |||
607 | pyname = sys.intern(pyname) | ||
608 | iri = sys.intern(iri) | ||
609 | |||
610 | cls._OBJ_IRIS[pyname] = iri | ||
611 | cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact) | ||
612 | |||
613 | def __setattr__(self, name, value): | ||
614 | if name == self.ID_ALIAS: | ||
615 | self["@id"] = value | ||
616 | return | ||
617 | |||
618 | try: | ||
619 | iri = self._OBJ_IRIS[name] | ||
620 | self[iri] = value | ||
621 | except KeyError: | ||
622 | raise AttributeError( | ||
623 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
624 | ) | ||
625 | |||
626 | def __getattr__(self, name): | ||
627 | if name in self._OBJ_IRIS: | ||
628 | return self.__dict__["_obj_data"][self._OBJ_IRIS[name]] | ||
629 | |||
630 | if name == self.ID_ALIAS: | ||
631 | return self.__dict__["_obj_data"]["@id"] | ||
632 | |||
633 | if name == "_metadata": | ||
634 | return self.__dict__["_obj_metadata"] | ||
635 | |||
636 | if name == "_IRI": | ||
637 | return self._OBJ_IRIS | ||
638 | |||
639 | if name == "TYPE": | ||
640 | return self.__class__._OBJ_TYPE | ||
641 | |||
642 | if name == "COMPACT_TYPE": | ||
643 | return self.__class__._OBJ_COMPACT_TYPE | ||
644 | |||
645 | raise AttributeError( | ||
646 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
647 | ) | ||
648 | |||
649 | def __delattr__(self, name): | ||
650 | if name == self.ID_ALIAS: | ||
651 | del self["@id"] | ||
652 | return | ||
653 | |||
654 | try: | ||
655 | iri = self._OBJ_IRIS[name] | ||
656 | del self[iri] | ||
657 | except KeyError: | ||
658 | raise AttributeError( | ||
659 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
660 | ) | ||
661 | |||
662 | def __get_prop(self, iri): | ||
663 | if iri not in self._OBJ_PROPERTIES: | ||
664 | raise KeyError( | ||
665 | f"'{iri}' is not a valid property of {self.__class__.__name__}" | ||
666 | ) | ||
667 | |||
668 | return self._OBJ_PROPERTIES[iri] | ||
669 | |||
670 | def __iter_props(self): | ||
671 | for iri, v in self._OBJ_PROPERTIES.items(): | ||
672 | yield iri, *v | ||
673 | |||
674 | def __getitem__(self, iri): | ||
675 | return self.__dict__["_obj_data"][iri] | ||
676 | |||
677 | def __setitem__(self, iri, value): | ||
678 | if iri == "@id": | ||
679 | if self.NODE_KIND == NodeKind.BlankNode: | ||
680 | if not is_blank_node(value): | ||
681 | raise ValueError( | ||
682 | f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'" | ||
683 | ) | ||
684 | elif self.NODE_KIND == NodeKind.IRI: | ||
685 | if not is_IRI(value): | ||
686 | raise ValueError( | ||
687 | f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'" | ||
688 | ) | ||
689 | else: | ||
690 | if not is_blank_node(value) and not is_IRI(value): | ||
691 | raise ValueError( | ||
692 | f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI" | ||
693 | ) | ||
694 | |||
695 | prop, _, _, _, _ = self.__get_prop(iri) | ||
696 | prop.validate(value) | ||
697 | self.__dict__["_obj_data"][iri] = prop.set(value) | ||
698 | |||
699 | def __delitem__(self, iri): | ||
700 | prop, _, _, _, _ = self.__get_prop(iri) | ||
701 | self.__dict__["_obj_data"][iri] = prop.init() | ||
702 | |||
703 | def __iter__(self): | ||
704 | return iter(self._OBJ_PROPERTIES) | ||
705 | |||
706 | def walk(self, callback, path=None): | ||
707 | """ | ||
708 | Walk object tree, invoking the callback for each item | ||
709 | |||
710 | Callback has the form: | ||
711 | |||
712 | def callback(object, path): | ||
713 | """ | ||
714 | if path is None: | ||
715 | path = ["."] | ||
716 | |||
717 | if callback(self, path): | ||
718 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
719 | prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"]) | ||
720 | |||
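# Sketch of a walk() callback: it receives each value with its path; a truthy
# return from an object node lets the traversal descend into its properties.
#
#   def dump(value, path):
#       print("".join(path), repr(value))
#       return True
#
#   # obj.walk(dump)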
721 | def property_keys(self): | ||
722 | for iri, _, _, _, pyname, compact in self.__iter_props(): | ||
723 | if iri == "@id": | ||
724 | compact = self.ID_ALIAS | ||
725 | yield pyname, iri, compact | ||
726 | |||
727 | def iter_objects(self, *, recursive=False, visited=None): | ||
728 | """ | ||
729 | Iterate over all objects that are children of this one | ||
730 | """ | ||
731 | if visited is None: | ||
732 | visited = set() | ||
733 | |||
734 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
735 | for c in prop.iter_objects( | ||
736 | self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited | ||
737 | ): | ||
738 | yield c | ||
739 | |||
740 | def encode(self, encoder, state): | ||
741 | idname = self.ID_ALIAS or self._OBJ_IRIS["_id"] | ||
742 | if not self._id and self.NODE_KIND == NodeKind.IRI: | ||
743 | raise ValueError( | ||
744 | f"{self.__class__.__name__} ({id(self)}) must have a IRI for property '{idname}'" | ||
745 | ) | ||
746 | |||
747 | if state.is_written(self): | ||
748 | encoder.write_iri(state.get_object_id(self)) | ||
749 | return | ||
750 | |||
751 | state.add_written(self) | ||
752 | |||
753 | with encoder.write_object( | ||
754 | self, | ||
755 | state.get_object_id(self), | ||
756 | bool(self._id) or state.is_refed(self), | ||
757 | ) as obj_s: | ||
758 | self._encode_properties(obj_s, state) | ||
759 | |||
760 | def _encode_properties(self, encoder, state): | ||
761 | for iri, prop, min_count, max_count, pyname, compact in self.__iter_props(): | ||
762 | value = self.__dict__["_obj_data"][iri] | ||
763 | if prop.elide(value): | ||
764 | if min_count: | ||
765 | raise ValueError( | ||
766 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})" | ||
767 | ) | ||
768 | continue | ||
769 | |||
770 | if min_count is not None: | ||
771 | if not prop.check_min_count(value, min_count): | ||
772 | raise ValueError( | ||
773 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements" | ||
774 | ) | ||
775 | |||
776 | if max_count is not None: | ||
777 | if not prop.check_max_count(value, max_count): | ||
778 | raise ValueError( | ||
779 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements" | ||
780 | ) | ||
781 | |||
782 | if iri == self._OBJ_IRIS["_id"]: | ||
783 | continue | ||
784 | |||
785 | with encoder.write_property(iri, compact) as prop_s: | ||
786 | prop.encode(prop_s, value, state) | ||
787 | |||
788 | @classmethod | ||
789 | def _make_object(cls, typ): | ||
790 | if typ not in cls.CLASSES: | ||
791 | raise TypeError(f"Unknown type {typ}") | ||
792 | |||
793 | return cls.CLASSES[typ]() | ||
794 | |||
795 | @classmethod | ||
796 | def decode(cls, decoder, *, objectset=None): | ||
797 | typ, obj_d = decoder.read_object() | ||
798 | if typ is None: | ||
799 | raise TypeError("Unable to determine type for object") | ||
800 | |||
801 | obj = cls._make_object(typ) | ||
802 | for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]): | ||
803 | with obj_d.read_property(key) as prop_d: | ||
804 | if prop_d is None: | ||
805 | continue | ||
806 | |||
807 | _id = prop_d.read_iri() | ||
808 | if _id is None: | ||
809 | raise TypeError(f"Object key '{key}' is the wrong type") | ||
810 | |||
811 | obj._id = _id | ||
812 | break | ||
813 | |||
814 | if obj.NODE_KIND == NodeKind.IRI and not obj._id: | ||
815 | raise ValueError("Object is missing required IRI") | ||
816 | |||
817 | if objectset is not None: | ||
818 | if obj._id: | ||
819 | v = objectset.find_by_id(_id) | ||
820 | if v is not None: | ||
821 | return v | ||
822 | |||
823 | obj._decode_properties(obj_d, objectset=objectset) | ||
824 | |||
825 | if objectset is not None: | ||
826 | objectset.add_index(obj) | ||
827 | return obj | ||
828 | |||
829 | def _decode_properties(self, decoder, objectset=None): | ||
830 | for key in decoder.object_keys(): | ||
831 | if not self._decode_prop(decoder, key, objectset=objectset): | ||
832 | raise KeyError(f"Unknown property '{key}'") | ||
833 | |||
834 | def _decode_prop(self, decoder, key, objectset=None): | ||
835 | if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS): | ||
836 | return True | ||
837 | |||
838 | for iri, prop, _, _, _, compact in self.__iter_props(): | ||
839 | if compact == key: | ||
840 | read_key = compact | ||
841 | elif iri == key: | ||
842 | read_key = iri | ||
843 | else: | ||
844 | continue | ||
845 | |||
846 | with decoder.read_property(read_key) as prop_d: | ||
847 | v = prop.decode(prop_d, objectset=objectset) | ||
848 | prop.validate(v) | ||
849 | self.__dict__["_obj_data"][iri] = v | ||
850 | return True | ||
851 | |||
852 | return False | ||
853 | |||
854 | def link_helper(self, objectset, missing, visited): | ||
855 | if self in visited: | ||
856 | return | ||
857 | |||
858 | visited.add(self) | ||
859 | |||
860 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
861 | self.__dict__["_obj_data"][iri] = prop.link_prop( | ||
862 | self.__dict__["_obj_data"][iri], | ||
863 | objectset, | ||
864 | missing, | ||
865 | visited, | ||
866 | ) | ||
867 | |||
868 | def __str__(self): | ||
869 | parts = [ | ||
870 | f"{self.__class__.__name__}(", | ||
871 | ] | ||
872 | if self._id: | ||
873 | parts.append(f"@id='{self._id}'") | ||
874 | parts.append(")") | ||
875 | return "".join(parts) | ||
876 | |||
877 | def __hash__(self): | ||
878 | return super().__hash__() | ||
879 | |||
880 | def __eq__(self, other): | ||
881 | return super().__eq__(other) | ||
882 | |||
883 | def __lt__(self, other): | ||
884 | def sort_key(obj): | ||
885 | if isinstance(obj, str): | ||
886 | return (obj, "", "", "") | ||
887 | return ( | ||
888 | obj._id or "", | ||
889 | obj.TYPE, | ||
890 | getattr(obj, "name", None) or "", | ||
891 | id(obj), | ||
892 | ) | ||
893 | |||
894 | return sort_key(self) < sort_key(other) | ||
895 | |||
896 | |||
897 | class SHACLExtensibleObject(object): | ||
898 | CLOSED = False | ||
899 | |||
900 | def __init__(self, typ=None, **kwargs): | ||
901 | if typ: | ||
902 | self.__dict__["_obj_TYPE"] = (typ, None) | ||
903 | else: | ||
904 | self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE) | ||
905 | super().__init__(**kwargs) | ||
906 | |||
907 | def _is_abstract(self): | ||
908 | # Unknown classes are assumed not to be abstract so that they can be | ||
909 | # deserialized | ||
910 | typ = self.__dict__["_obj_TYPE"][0] | ||
911 | if typ in self.__class__.CLASSES: | ||
912 | return self.__class__.CLASSES[typ].IS_ABSTRACT | ||
913 | |||
914 | return False | ||
915 | |||
916 | @classmethod | ||
917 | def _make_object(cls, typ): | ||
918 | # Check for a known type, and if so, deserialize as that instead | ||
919 | if typ in cls.CLASSES: | ||
920 | return cls.CLASSES[typ]() | ||
921 | |||
922 | obj = cls(typ) | ||
923 | return obj | ||
924 | |||
925 | def _decode_properties(self, decoder, objectset=None): | ||
926 | def decode_value(d): | ||
927 | if not d.is_list(): | ||
928 | return d.read_value() | ||
929 | |||
930 | return [decode_value(val_d) for val_d in d.read_list()] | ||
931 | |||
932 | if self.CLOSED: | ||
933 | super()._decode_properties(decoder, objectset=objectset) | ||
934 | return | ||
935 | |||
936 | for key in decoder.object_keys(): | ||
937 | if self._decode_prop(decoder, key, objectset=objectset): | ||
938 | continue | ||
939 | |||
940 | if not is_IRI(key): | ||
941 | raise KeyError( | ||
942 | f"Extensible object properties must be IRIs. Got '{key}'" | ||
943 | ) | ||
944 | |||
945 | with decoder.read_property(key) as prop_d: | ||
946 | self.__dict__["_obj_data"][key] = decode_value(prop_d) | ||
947 | |||
948 | def _encode_properties(self, encoder, state): | ||
949 | def encode_value(encoder, v): | ||
950 | if isinstance(v, bool): | ||
951 | encoder.write_bool(v) | ||
952 | elif isinstance(v, str): | ||
953 | encoder.write_string(v) | ||
954 | elif isinstance(v, int): | ||
955 | encoder.write_integer(v) | ||
956 | elif isinstance(v, float): | ||
957 | encoder.write_float(v) | ||
958 | elif isinstance(v, list): | ||
959 | with encoder.write_list() as list_s: | ||
960 | for i in v: | ||
961 | with list_s.write_list_item() as item_s: | ||
962 | encode_value(item_s, i) | ||
963 | else: | ||
964 | raise TypeError( | ||
965 | f"Unsupported serialized type {type(v)} with value '{v}'" | ||
966 | ) | ||
967 | |||
968 | super()._encode_properties(encoder, state) | ||
969 | if self.CLOSED: | ||
970 | return | ||
971 | |||
972 | for iri, value in self.__dict__["_obj_data"].items(): | ||
973 | if iri in self._OBJ_PROPERTIES: | ||
974 | continue | ||
975 | |||
976 | with encoder.write_property(iri) as prop_s: | ||
977 | encode_value(prop_s, value) | ||
978 | |||
979 | def __setitem__(self, iri, value): | ||
980 | try: | ||
981 | super().__setitem__(iri, value) | ||
982 | except KeyError: | ||
983 | if self.CLOSED: | ||
984 | raise | ||
985 | |||
986 | if not is_IRI(iri): | ||
987 | raise KeyError(f"Key '{iri}' must be an IRI") | ||
988 | self.__dict__["_obj_data"][iri] = value | ||
989 | |||
990 | def __delitem__(self, iri): | ||
991 | try: | ||
992 | super().__delitem__(iri) | ||
993 | except KeyError: | ||
994 | if self.CLOSED: | ||
995 | raise | ||
996 | |||
997 | if not is_IRI(iri): | ||
998 | raise KeyError(f"Key '{iri}' must be an IRI") | ||
999 | del self.__dict__["_obj_data"][iri] | ||
1000 | |||
1001 | def __getattr__(self, name): | ||
1002 | if name == "TYPE": | ||
1003 | return self.__dict__["_obj_TYPE"][0] | ||
1004 | if name == "COMPACT_TYPE": | ||
1005 | return self.__dict__["_obj_TYPE"][1] | ||
1006 | return super().__getattr__(name) | ||
1007 | |||
1008 | def property_keys(self): | ||
1009 | iris = set() | ||
1010 | for pyname, iri, compact in super().property_keys(): | ||
1011 | iris.add(iri) | ||
1012 | yield pyname, iri, compact | ||
1013 | |||
1014 | if self.CLOSED: | ||
1015 | return | ||
1016 | |||
1017 | for iri in self.__dict__["_obj_data"].keys(): | ||
1018 | if iri not in iris: | ||
1019 | yield None, iri, None | ||
1020 | |||
1021 | |||
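# Editor's sketch (illustrative, not from the patch): with CLOSED = False,
# an extensible object accepts any fully qualified IRI as a property key,
# while non-IRI keys are rejected; the class name below is hypothetical.
#
#   obj = MyExtensibleClass()
#   obj["https://example.com/vocab/customProp"] = "value"  # kept in _obj_data
#   obj["customProp"] = "oops"                             # raises KeyError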
1022 | class SHACLObjectSet(object): | ||
1023 | def __init__(self, objects=[], *, link=False): | ||
1024 | self.objects = set() | ||
1025 | self.missing_ids = set() | ||
1026 | for o in objects: | ||
1027 | self.objects.add(o) | ||
1028 | self.create_index() | ||
1029 | if link: | ||
1030 | self._link() | ||
1031 | |||
1032 | def create_index(self): | ||
1033 | """ | ||
1034 | (re)Create object index | ||
1035 | |||
1036 | Creates or recreates the indices for the object set to enable fast | ||
1037 | lookup. All objects and their children are walked and indexed | ||
1038 | """ | ||
1039 | self.obj_by_id = {} | ||
1040 | self.obj_by_type = {} | ||
1041 | for o in self.foreach(): | ||
1042 | self.add_index(o) | ||
1043 | |||
1044 | def add_index(self, obj): | ||
1045 | """ | ||
1046 | Add object to index | ||
1047 | |||
1048 | Adds the object to all appropriate indices | ||
1049 | """ | ||
1050 | |||
1051 | def reg_type(typ, compact, o, exact): | ||
1052 | self.obj_by_type.setdefault(typ, set()).add((exact, o)) | ||
1053 | if compact: | ||
1054 | self.obj_by_type.setdefault(compact, set()).add((exact, o)) | ||
1055 | |||
1056 | if not isinstance(obj, SHACLObject): | ||
1057 | raise TypeError("Object is not of type SHACLObject") | ||
1058 | |||
1059 | for typ in SHACLObject.CLASSES.values(): | ||
1060 | if isinstance(obj, typ): | ||
1061 | reg_type( | ||
1062 | typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ | ||
1063 | ) | ||
1064 | |||
1065 | # This covers custom extensions | ||
1066 | reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True) | ||
1067 | |||
1068 | if not obj._id: | ||
1069 | return | ||
1070 | |||
1071 | self.missing_ids.discard(obj._id) | ||
1072 | |||
1073 | if obj._id in self.obj_by_id: | ||
1074 | return | ||
1075 | |||
1076 | self.obj_by_id[obj._id] = obj | ||
1077 | |||
1078 | def add(self, obj): | ||
1079 | """ | ||
1080 | Add object to object set | ||
1081 | |||
1082 | Adds a SHACLObject to the object set and indexes it. | ||
1083 |||
1084 | NOTE: Child objects of the added object are not indexed | ||
1085 | """ | ||
1086 | if not isinstance(obj, SHACLObject): | ||
1087 | raise TypeError("Object is not of type SHACLObject") | ||
1088 | |||
1089 | if obj not in self.objects: | ||
1090 | self.objects.add(obj) | ||
1091 | self.add_index(obj) | ||
1092 | return obj | ||
1093 | |||
1094 | def update(self, *others): | ||
1095 | """ | ||
1096 | Update the object set, adding all objects from each of the other iterables | ||
1097 | """ | ||
1098 | for o in others: | ||
1099 | for obj in o: | ||
1100 | self.add(obj) | ||
1101 | |||
1102 | def __contains__(self, item): | ||
1103 | """ | ||
1104 | Returns True if the item is in the object set | ||
1105 | """ | ||
1106 | return item in self.objects | ||
1107 | |||
1108 | def link(self): | ||
1109 | """ | ||
1110 | Link object set | ||
1111 | |||
1112 | Links the objects in the object set by replacing string object | ||
1113 | references with references to the objects themselves. e.g. a | ||
1114 | property that references object "https://foo/bar" by a string | ||
1115 | reference will be replaced with an actual reference to the object | ||
1116 | with the same ID, if it exists in the object set | ||
1117 | |||
1118 | If multiple objects with the same ID are found, the duplicates are | ||
1119 | eliminated | ||
1120 | """ | ||
1121 | self.create_index() | ||
1122 | return self._link() | ||
1123 | |||
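# Editor's sketch (illustrative): link() resolves string references to
# objects already in the set and returns the IDs it could not resolve.
#
#   missing = objset.link()
#   if missing:
#       print("unresolved references:", ", ".join(sorted(missing)))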
1124 | def _link(self): | ||
1125 | global NAMED_INDIVIDUALS | ||
1126 | |||
1127 | self.missing_ids = set() | ||
1128 | visited = set() | ||
1129 | |||
1130 | new_objects = set() | ||
1131 | |||
1132 | for o in self.objects: | ||
1133 | if o._id: | ||
1134 | o = self.find_by_id(o._id, o) | ||
1135 | o.link_helper(self, self.missing_ids, visited) | ||
1136 | new_objects.add(o) | ||
1137 | |||
1138 | self.objects = new_objects | ||
1139 | |||
1140 | # Remove blank nodes | ||
1141 | obj_by_id = {} | ||
1142 | for _id, obj in self.obj_by_id.items(): | ||
1143 | if _id.startswith("_:"): | ||
1144 | del obj._id | ||
1145 | else: | ||
1146 | obj_by_id[_id] = obj | ||
1147 | self.obj_by_id = obj_by_id | ||
1148 | |||
1149 | # Named individuals aren't considered missing | ||
1150 | self.missing_ids -= NAMED_INDIVIDUALS | ||
1151 | |||
1152 | return self.missing_ids | ||
1153 | |||
1154 | def find_by_id(self, _id, default=None): | ||
1155 | """ | ||
1156 | Find object by ID | ||
1157 | |||
1158 | Returns the object that matches the specified ID, or default if there | ||
1159 | is no object with the specified ID | ||
1160 | """ | ||
1161 | if _id not in self.obj_by_id: | ||
1162 | return default | ||
1163 | return self.obj_by_id[_id] | ||
1164 | |||
1165 | def foreach(self): | ||
1166 | """ | ||
1167 | Iterate over every object in the object set, and all child objects | ||
1168 | """ | ||
1169 | visited = set() | ||
1170 | for o in self.objects: | ||
1171 | if o not in visited: | ||
1172 | yield o | ||
1173 | visited.add(o) | ||
1174 | |||
1175 | for child in o.iter_objects(recursive=True, visited=visited): | ||
1176 | yield child | ||
1177 | |||
1178 | def foreach_type(self, typ, *, match_subclass=True): | ||
1179 | """ | ||
1180 | Iterate over each object of a specified type (or subclass thereof) | ||
1181 |||
1182 | If match_subclass is True, any class derived from typ will also match | ||
1183 | (similar to isinstance()). If False, only exact matches will be | ||
1184 | returned | ||
1185 | """ | ||
1186 | if not isinstance(typ, str): | ||
1187 | if not issubclass(typ, SHACLObject): | ||
1188 | raise TypeError(f"Type must be derived from SHACLObject, got {typ}") | ||
1189 | typ = typ._OBJ_TYPE | ||
1190 | |||
1191 | if typ not in self.obj_by_type: | ||
1192 | return | ||
1193 | |||
1194 | for exact, o in self.obj_by_type[typ]: | ||
1195 | if match_subclass or exact: | ||
1196 | yield o | ||
1197 | |||
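# Editor's sketch (illustrative; the IRI is hypothetical): look an object
# up by ID, or iterate by type with subclasses included by default.
#
#   pkg = objset.find_by_id("https://example.com/spdx/pkg-1")
#   for e in objset.foreach_type(Element, match_subclass=True):
#       ...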
1198 | def merge(self, *objectsets): | ||
1199 | """ | ||
1200 | Merge object sets | ||
1201 | |||
1202 | Returns a new object set that is the combination of this object set and | ||
1203 | all provided arguments | ||
1204 | """ | ||
1205 | new_objects = set() | ||
1206 | new_objects |= self.objects | ||
1207 | for d in objectsets: | ||
1208 | new_objects |= d.objects | ||
1209 | |||
1210 | return SHACLObjectSet(new_objects, link=True) | ||
1211 | |||
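# Editor's sketch (illustrative): merging builds a new, already-linked set.
#
#   combined = objset_a.merge(objset_b, objset_c)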
1212 | def encode(self, encoder, force_list=False, *, key=None): | ||
1213 | """ | ||
1214 | Serialize a list of objects to a serialization encoder | ||
1215 | |||
1216 | If force_list is True, a list will always be written using the encoder. | ||
1217 | """ | ||
1218 | ref_counts = {} | ||
1219 | state = EncodeState() | ||
1220 | |||
1221 | def walk_callback(value, path): | ||
1222 | nonlocal state | ||
1223 | nonlocal ref_counts | ||
1224 | |||
1225 | if not isinstance(value, SHACLObject): | ||
1226 | return True | ||
1227 | |||
1228 | # Remove blank node ID for re-assignment | ||
1229 | if value._id and value._id.startswith("_:"): | ||
1230 | del value._id | ||
1231 | |||
1232 | if value._id: | ||
1233 | state.add_refed(value) | ||
1234 | |||
1235 | # If the object is referenced more than once, add it to the set of | ||
1236 | # referenced objects | ||
1237 | ref_counts.setdefault(value, 0) | ||
1238 | ref_counts[value] += 1 | ||
1239 | if ref_counts[value] > 1: | ||
1240 | state.add_refed(value) | ||
1241 | return False | ||
1242 | |||
1243 | return True | ||
1244 | |||
1245 | for o in self.objects: | ||
1246 | if o._id: | ||
1247 | state.add_refed(o) | ||
1248 | o.walk(walk_callback) | ||
1249 | |||
1250 | use_list = force_list or len(self.objects) > 1 | ||
1251 | |||
1252 | if use_list: | ||
1253 | # If we are making a list, add all the objects referred to by | ||
1254 | # reference to the list | ||
1255 | objects = list(self.objects | state.ref_objects) | ||
1256 | else: | ||
1257 | objects = list(self.objects) | ||
1258 | |||
1259 | objects.sort(key=key) | ||
1260 | |||
1261 | if use_list: | ||
1262 | # Ensure top level objects are only written in the top level graph | ||
1263 | # node, and referenced by ID everywhere else. This is done by setting | ||
1264 | # the flag that indicates this object has been written for all the top | ||
1265 | # level objects, then clearing it right before serializing the object. | ||
1266 | # | ||
1267 | # In this way, if an object is referenced before it is supposed to be | ||
1268 | # serialized into the @graph, it will serialize as a string instead of | ||
1269 | # the actual object | ||
1270 | for o in objects: | ||
1271 | state.written_objects.add(o) | ||
1272 | |||
1273 | with encoder.write_list() as list_s: | ||
1274 | for o in objects: | ||
1275 | # Allow this specific object to be written now | ||
1276 | state.written_objects.remove(o) | ||
1277 | with list_s.write_list_item() as item_s: | ||
1278 | o.encode(item_s, state) | ||
1279 | |||
1280 | elif objects: | ||
1281 | objects[0].encode(encoder, state) | ||
1282 | |||
1283 | def decode(self, decoder): | ||
1284 | self.create_index() | ||
1285 | |||
1286 | for obj_d in decoder.read_list(): | ||
1287 | o = SHACLObject.decode(obj_d, objectset=self) | ||
1288 | self.objects.add(o) | ||
1289 | |||
1290 | self._link() | ||
1291 | |||
1292 | |||
1293 | class EncodeState(object): | ||
1294 | def __init__(self): | ||
1295 | self.ref_objects = set() | ||
1296 | self.written_objects = set() | ||
1297 | self.blank_objects = {} | ||
1298 | |||
1299 | def get_object_id(self, o): | ||
1300 | if o._id: | ||
1301 | return o._id | ||
1302 | |||
1303 | if o not in self.blank_objects: | ||
1304 | _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}" | ||
1305 | self.blank_objects[o] = _id | ||
1306 | |||
1307 | return self.blank_objects[o] | ||
1308 | |||
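# Editor's note: IDs minted above follow the pattern "_:<ClassName><n>",
# e.g. "_:CreationInfo0", and are only stable for the lifetime of this
# EncodeState, i.e. a single serialization pass.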
1309 | def is_refed(self, o): | ||
1310 | return o in self.ref_objects | ||
1311 | |||
1312 | def add_refed(self, o): | ||
1313 | self.ref_objects.add(o) | ||
1314 | |||
1315 | def is_written(self, o): | ||
1316 | return o in self.written_objects | ||
1317 | |||
1318 | def add_written(self, o): | ||
1319 | self.written_objects.add(o) | ||
1320 | |||
1321 | |||
1322 | class Decoder(ABC): | ||
1323 | @abstractmethod | ||
1324 | def read_value(self): | ||
1325 | """ | ||
1326 | Consume next item | ||
1327 | |||
1328 | Consumes the next item of any type | ||
1329 | """ | ||
1330 | pass | ||
1331 | |||
1332 | @abstractmethod | ||
1333 | def read_string(self): | ||
1334 | """ | ||
1335 | Consume the next item as a string. | ||
1336 | |||
1337 | Returns the string value of the next item, or `None` if the next item | ||
1338 | is not a string | ||
1339 | """ | ||
1340 | pass | ||
1341 | |||
1342 | @abstractmethod | ||
1343 | def read_datetime(self): | ||
1344 | """ | ||
1345 | Consumes the next item as a date & time string | ||
1346 | |||
1347 | Returns the string value of the next item, if it is an ISO datetime, or | ||
1348 | `None` if the next item is not an ISO datetime string. | ||
1349 | |||
1350 | Note that validation of the string is done by the caller, so a minimal | ||
1351 | implementation can just check if the next item is a string without | ||
1352 | worrying about the format | ||
1353 | """ | ||
1354 | pass | ||
1355 | |||
1356 | @abstractmethod | ||
1357 | def read_integer(self): | ||
1358 | """ | ||
1359 | Consumes the next item as an integer | ||
1360 | |||
1361 | Returns the integer value of the next item, or `None` if the next item | ||
1362 | is not an integer | ||
1363 | """ | ||
1364 | pass | ||
1365 | |||
1366 | @abstractmethod | ||
1367 | def read_iri(self): | ||
1368 | """ | ||
1369 | Consumes the next item as an IRI string | ||
1370 | |||
1371 | Returns the string value of the next item as an IRI, or `None` if the | ||
1372 | next item is not an IRI. | ||
1373 | |||
1374 | The returned string should be either a fully-qualified IRI, or a blank | ||
1375 | node ID | ||
1376 | """ | ||
1377 | pass | ||
1378 | |||
1379 | @abstractmethod | ||
1380 | def read_enum(self, e): | ||
1381 | """ | ||
1382 | Consumes the next item as an Enum value string | ||
1383 | |||
1384 | Returns the fully qualified IRI of the next enum item, or `None` if the | ||
1385 | next item is not an enum value. | ||
1386 | |||
1387 | The caller is responsible for validating that the returned IRI is | ||
1388 | actually a member of the specified Enum, so the `Decoder` does not need | ||
1389 | to check that, but may if it wishes | ||
1390 | """ | ||
1391 | pass | ||
1392 | |||
1393 | @abstractmethod | ||
1394 | def read_bool(self): | ||
1395 | """ | ||
1396 | Consume the next item as a boolean value | ||
1397 | |||
1398 | Returns the boolean value of the next item, or `None` if the next item | ||
1399 | is not a boolean | ||
1400 | """ | ||
1401 | pass | ||
1402 | |||
1403 | @abstractmethod | ||
1404 | def read_float(self): | ||
1405 | """ | ||
1406 | Consume the next item as a float value | ||
1407 | |||
1408 | Returns the float value of the next item, or `None` if the next item is | ||
1409 | not a float | ||
1410 | """ | ||
1411 | pass | ||
1412 | |||
1413 | @abstractmethod | ||
1414 | def read_list(self): | ||
1415 | """ | ||
1416 | Consume the next item as a list generator | ||
1417 | |||
1418 | This should generate a `Decoder` object for each item in the list. The | ||
1419 | generated `Decoder` can be used to read the corresponding item from the | ||
1420 | list | ||
1421 | """ | ||
1422 | pass | ||
1423 | |||
1424 | @abstractmethod | ||
1425 | def is_list(self): | ||
1426 | """ | ||
1427 | Checks if the next item is a list | ||
1428 | |||
1429 | Returns True if the next item is a list, or False if it is a scalar | ||
1430 | """ | ||
1431 | pass | ||
1432 | |||
1433 | @abstractmethod | ||
1434 | def read_object(self): | ||
1435 | """ | ||
1436 | Consume next item as an object | ||
1437 | |||
1438 | A context manager that "enters" the next item as an object and yields a | ||
1439 | `Decoder` that can read properties from it. If the next item is not an | ||
1440 | object, yields `None` | ||
1441 | |||
1442 | Properties will be read out of the object using `read_property` and | ||
1443 | `read_object_id` | ||
1444 | """ | ||
1445 | pass | ||
1446 | |||
1447 | @abstractmethod | ||
1448 | @contextmanager | ||
1449 | def read_property(self, key): | ||
1450 | """ | ||
1451 | Read property from object | ||
1452 | |||
1453 | A context manager that yields a `Decoder` that can be used to read the | ||
1454 | value of the property with the given key in the current object, or `None` | ||
1455 | if the property does not exist in the current object. | ||
1456 | """ | ||
1457 | pass | ||
1458 | |||
1459 | @abstractmethod | ||
1460 | def object_keys(self): | ||
1461 | """ | ||
1462 | Read property keys from an object | ||
1463 | |||
1464 | Iterates over all the serialized keys for the current object | ||
1465 | """ | ||
1466 | pass | ||
1467 | |||
1468 | @abstractmethod | ||
1469 | def read_object_id(self, alias=None): | ||
1470 | """ | ||
1471 | Read current object ID property | ||
1472 | |||
1473 | Returns the ID of the current object if one is defined, or `None` if | ||
1474 | the current object has no ID. | ||
1475 | |||
1476 | The ID must be a fully qualified IRI or a blank node | ||
1477 | |||
1478 | If `alias` is provided, it is a hint as to another name by which the ID | ||
1479 | might be found, if the `Decoder` supports aliases for an ID | ||
1480 | """ | ||
1481 | pass | ||
1482 | |||
1483 | |||
1484 | class JSONLDDecoder(Decoder): | ||
1485 | def __init__(self, data, root=False): | ||
1486 | self.data = data | ||
1487 | self.root = root | ||
1488 | |||
1489 | def read_value(self): | ||
1490 | if isinstance(self.data, str): | ||
1491 | try: | ||
1492 | return float(self.data) | ||
1493 | except ValueError: | ||
1494 | pass | ||
1495 | return self.data | ||
1496 | |||
1497 | def read_string(self): | ||
1498 | if isinstance(self.data, str): | ||
1499 | return self.data | ||
1500 | return None | ||
1501 | |||
1502 | def read_datetime(self): | ||
1503 | return self.read_string() | ||
1504 | |||
1505 | def read_integer(self): | ||
1506 | if isinstance(self.data, int): | ||
1507 | return self.data | ||
1508 | return None | ||
1509 | |||
1510 | def read_bool(self): | ||
1511 | if isinstance(self.data, bool): | ||
1512 | return self.data | ||
1513 | return None | ||
1514 | |||
1515 | def read_float(self): | ||
1516 | if isinstance(self.data, (int, float, str)): | ||
1517 | return float(self.data) | ||
1518 | return None | ||
1519 | |||
1520 | def read_iri(self): | ||
1521 | if isinstance(self.data, str): | ||
1522 | return self.data | ||
1523 | return None | ||
1524 | |||
1525 | def read_enum(self, e): | ||
1526 | if isinstance(self.data, str): | ||
1527 | return self.data | ||
1528 | return None | ||
1529 | |||
1530 | def read_list(self): | ||
1531 | if self.is_list(): | ||
1532 | for v in self.data: | ||
1533 | yield self.__class__(v) | ||
1534 | else: | ||
1535 | yield self | ||
1536 | |||
1537 | def is_list(self): | ||
1538 | return isinstance(self.data, (list, tuple, set)) | ||
1539 | |||
1540 | def __get_value(self, *keys): | ||
1541 | for k in keys: | ||
1542 | if k and k in self.data: | ||
1543 | return self.data[k] | ||
1544 | return None | ||
1545 | |||
1546 | @contextmanager | ||
1547 | def read_property(self, key): | ||
1548 | v = self.__get_value(key) | ||
1549 | if v is not None: | ||
1550 | yield self.__class__(v) | ||
1551 | else: | ||
1552 | yield None | ||
1553 | |||
1554 | def object_keys(self): | ||
1555 | for key in self.data.keys(): | ||
1556 | if key in ("@type", "type"): | ||
1557 | continue | ||
1558 | if self.root and key == "@context": | ||
1559 | continue | ||
1560 | yield key | ||
1561 | |||
1562 | def read_object(self): | ||
1563 | typ = self.__get_value("@type", "type") | ||
1564 | if typ is not None: | ||
1565 | return typ, self | ||
1566 | |||
1567 | return None, self | ||
1568 | |||
1569 | def read_object_id(self, alias=None): | ||
1570 | return self.__get_value(alias, "@id") | ||
1571 | |||
1572 | |||
1573 | class JSONLDDeserializer(object): | ||
1574 | def deserialize_data(self, data, objectset: SHACLObjectSet): | ||
1575 | if "@graph" in data: | ||
1576 | h = JSONLDDecoder(data["@graph"], True) | ||
1577 | else: | ||
1578 | h = JSONLDDecoder(data, True) | ||
1579 | |||
1580 | objectset.decode(h) | ||
1581 | |||
1582 | def read(self, f, objectset: SHACLObjectSet): | ||
1583 | data = json.load(f) | ||
1584 | self.deserialize_data(data, objectset) | ||
1585 | |||
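# Editor's sketch (illustrative; the filename is hypothetical): load a
# JSON-LD document into an object set.
#
#   objset = SHACLObjectSet()
#   with open("doc.spdx.json") as f:
#       JSONLDDeserializer().read(f, objset)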
1586 | |||
1587 | class Encoder(ABC): | ||
1588 | @abstractmethod | ||
1589 | def write_string(self, v): | ||
1590 | """ | ||
1591 | Write a string value | ||
1592 | |||
1593 | Encodes the value as a string in the output | ||
1594 | """ | ||
1595 | pass | ||
1596 | |||
1597 | @abstractmethod | ||
1598 | def write_datetime(self, v): | ||
1599 | """ | ||
1600 | Write a date & time string | ||
1601 | |||
1602 | Encodes the value as an ISO datetime string | ||
1603 | |||
1604 | Note: The provided string is already correctly encoded as an ISO datetime | ||
1605 | """ | ||
1606 | pass | ||
1607 | |||
1608 | @abstractmethod | ||
1609 | def write_integer(self, v): | ||
1610 | """ | ||
1611 | Write an integer value | ||
1612 | |||
1613 | Encodes the value as an integer in the output | ||
1614 | """ | ||
1615 | pass | ||
1616 | |||
1617 | @abstractmethod | ||
1618 | def write_iri(self, v, compact=None): | ||
1619 | """ | ||
1620 | Write IRI | ||
1621 | |||
1622 | Encodes the string as an IRI. Note that the string will be either a | ||
1623 | fully qualified IRI or a blank node ID. If `compact` is provided and | ||
1624 | the serialization supports compacted IRIs, it should be preferred to | ||
1625 | the full IRI | ||
1626 | """ | ||
1627 | pass | ||
1628 | |||
1629 | @abstractmethod | ||
1630 | def write_enum(self, v, e, compact=None): | ||
1631 | """ | ||
1632 | Write enum value IRI | ||
1633 | |||
1634 | Encodes the string enum value IRI. Note that the string will be a fully | ||
1635 | qualified IRI. If `compact` is provided and the serialization supports | ||
1636 | compacted IRIs, it should be preferred to the full IRI. | ||
1637 | """ | ||
1638 | pass | ||
1639 | |||
1640 | @abstractmethod | ||
1641 | def write_bool(self, v): | ||
1642 | """ | ||
1643 | Write boolean | ||
1644 | |||
1645 | Encodes the value as a boolean in the output | ||
1646 | """ | ||
1647 | pass | ||
1648 | |||
1649 | @abstractmethod | ||
1650 | def write_float(self, v): | ||
1651 | """ | ||
1652 | Write float | ||
1653 | |||
1654 | Encodes the value as a floating point number in the output | ||
1655 | """ | ||
1656 | pass | ||
1657 | |||
1658 | @abstractmethod | ||
1659 | @contextmanager | ||
1660 | def write_object(self, o, _id, needs_id): | ||
1661 | """ | ||
1662 | Write object | ||
1663 | |||
1664 | A context manager that yields an `Encoder` that can be used to encode | ||
1665 | the given object properties. | ||
1666 | |||
1667 | The provided ID will always be a valid ID (even if o._id is `None`), in | ||
1668 | case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate | ||
1669 | to the `Encoder` if an ID must be written or not (if that is even an | ||
1670 | option). If it is `True`, the `Encoder` must encode an ID for the | ||
1671 | object. If `False`, the encoder is not required to encode an ID and may | ||
1672 | omit it. | ||
1673 | |||
1674 | The ID will be either a fully qualified IRI, or a blank node IRI. | ||
1675 | |||
1676 | Properties will be written to the object using `write_property` | ||
1677 | """ | ||
1678 | pass | ||
1679 | |||
1680 | @abstractmethod | ||
1681 | @contextmanager | ||
1682 | def write_property(self, iri, compact=None): | ||
1683 | """ | ||
1684 | Write object property | ||
1685 | |||
1686 | A context manager that yields an `Encoder` that can be used to encode | ||
1687 | the value for the property with the given IRI in the current object | ||
1688 | |||
1689 | Note that the IRI will be fully qualified. If `compact` is provided and | ||
1690 | the serialization supports compacted IRIs, it should be preferred to | ||
1691 | the full IRI. | ||
1692 | """ | ||
1693 | pass | ||
1694 | |||
1695 | @abstractmethod | ||
1696 | @contextmanager | ||
1697 | def write_list(self): | ||
1698 | """ | ||
1699 | Write list | ||
1700 | |||
1701 | A context manager that yields an `Encoder` that can be used to encode a | ||
1702 | list. | ||
1703 | |||
1704 | Each item of the list will be added using `write_list_item` | ||
1705 | """ | ||
1706 | pass | ||
1707 | |||
1708 | @abstractmethod | ||
1709 | @contextmanager | ||
1710 | def write_list_item(self): | ||
1711 | """ | ||
1712 | Write list item | ||
1713 | |||
1714 | A context manager that yields an `Encoder` that can be used to encode | ||
1715 | the value for a list item | ||
1716 | """ | ||
1717 | pass | ||
1718 | |||
1719 | |||
1720 | class JSONLDEncoder(Encoder): | ||
1721 | def __init__(self, data=None): | ||
1722 | self.data = data | ||
1723 | |||
1724 | def write_string(self, v): | ||
1725 | self.data = v | ||
1726 | |||
1727 | def write_datetime(self, v): | ||
1728 | self.data = v | ||
1729 | |||
1730 | def write_integer(self, v): | ||
1731 | self.data = v | ||
1732 | |||
1733 | def write_iri(self, v, compact=None): | ||
1734 | self.write_string(compact or v) | ||
1735 | |||
1736 | def write_enum(self, v, e, compact=None): | ||
1737 | self.write_string(compact or v) | ||
1738 | |||
1739 | def write_bool(self, v): | ||
1740 | self.data = v | ||
1741 | |||
1742 | def write_float(self, v): | ||
1743 | self.data = str(v) | ||
1744 | |||
1745 | @contextmanager | ||
1746 | def write_property(self, iri, compact=None): | ||
1747 | s = self.__class__(None) | ||
1748 | yield s | ||
1749 | if s.data is not None: | ||
1750 | self.data[compact or iri] = s.data | ||
1751 | |||
1752 | @contextmanager | ||
1753 | def write_object(self, o, _id, needs_id): | ||
1754 | self.data = { | ||
1755 | "type": o.COMPACT_TYPE or o.TYPE, | ||
1756 | } | ||
1757 | if needs_id: | ||
1758 | self.data[o.ID_ALIAS or "@id"] = _id | ||
1759 | yield self | ||
1760 | |||
1761 | @contextmanager | ||
1762 | def write_list(self): | ||
1763 | self.data = [] | ||
1764 | yield self | ||
1765 | if not self.data: | ||
1766 | self.data = None | ||
1767 | |||
1768 | @contextmanager | ||
1769 | def write_list_item(self): | ||
1770 | s = self.__class__(None) | ||
1771 | yield s | ||
1772 | if s.data is not None: | ||
1773 | self.data.append(s.data) | ||
1774 | |||
1775 | |||
1776 | class JSONLDSerializer(object): | ||
1777 | def __init__(self, **args): | ||
1778 | self.args = args | ||
1779 | |||
1780 | def serialize_data( | ||
1781 | self, | ||
1782 | objectset: SHACLObjectSet, | ||
1783 | force_at_graph=False, | ||
1784 | ): | ||
1785 | h = JSONLDEncoder() | ||
1786 | objectset.encode(h, force_at_graph) | ||
1787 | data = {} | ||
1788 | if len(CONTEXT_URLS) == 1: | ||
1789 | data["@context"] = CONTEXT_URLS[0] | ||
1790 | elif CONTEXT_URLS: | ||
1791 | data["@context"] = CONTEXT_URLS | ||
1792 | |||
1793 | if isinstance(h.data, list): | ||
1794 | data["@graph"] = h.data | ||
1795 | else: | ||
1796 | for k, v in h.data.items(): | ||
1797 | data[k] = v | ||
1798 | |||
1799 | return data | ||
1800 | |||
1801 | def write( | ||
1802 | self, | ||
1803 | objectset: SHACLObjectSet, | ||
1804 | f, | ||
1805 | force_at_graph=False, | ||
1806 | **kwargs, | ||
1807 | ): | ||
1808 | """ | ||
1809 | Write a SHACLObjectSet to a JSON LD file | ||
1810 | |||
1811 | If force_at_graph is True, a @graph node will always be written | ||
1812 | """ | ||
1813 | data = self.serialize_data(objectset, force_at_graph) | ||
1814 | |||
1815 | args = {**self.args, **kwargs} | ||
1816 | |||
1817 | sha1 = hashlib.sha1() | ||
1818 | for chunk in json.JSONEncoder(**args).iterencode(data): | ||
1819 | chunk = chunk.encode("utf-8") | ||
1820 | f.write(chunk) | ||
1821 | sha1.update(chunk) | ||
1822 | |||
1823 | return sha1.hexdigest() | ||
1824 | |||
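# Editor's sketch (illustrative; the filename is hypothetical). The file
# must be opened in binary mode, since encoded UTF-8 chunks are written
# to it; keyword arguments are passed through to json.JSONEncoder.
#
#   with open("doc.spdx.json", "wb") as f:
#       digest = JSONLDSerializer(indent=2).write(objset, f)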
1825 | |||
1826 | class JSONLDInlineEncoder(Encoder): | ||
1827 | def __init__(self, f, sha1): | ||
1828 | self.f = f | ||
1829 | self.comma = False | ||
1830 | self.sha1 = sha1 | ||
1831 | |||
1832 | def write(self, s): | ||
1833 | s = s.encode("utf-8") | ||
1834 | self.f.write(s) | ||
1835 | self.sha1.update(s) | ||
1836 | |||
1837 | def _write_comma(self): | ||
1838 | if self.comma: | ||
1839 | self.write(",") | ||
1840 | self.comma = False | ||
1841 | |||
1842 | def write_string(self, v): | ||
1843 | self.write(json.dumps(v)) | ||
1844 | |||
1845 | def write_datetime(self, v): | ||
1846 | self.write_string(v) | ||
1847 | |||
1848 | def write_integer(self, v): | ||
1849 | self.write(f"{v}") | ||
1850 | |||
1851 | def write_iri(self, v, compact=None): | ||
1852 | self.write_string(compact or v) | ||
1853 | |||
1854 | def write_enum(self, v, e, compact=None): | ||
1855 | self.write_iri(v, compact) | ||
1856 | |||
1857 | def write_bool(self, v): | ||
1858 | if v: | ||
1859 | self.write("true") | ||
1860 | else: | ||
1861 | self.write("false") | ||
1862 | |||
1863 | def write_float(self, v): | ||
1864 | self.write(json.dumps(str(v))) | ||
1865 | |||
1866 | @contextmanager | ||
1867 | def write_property(self, iri, compact=None): | ||
1868 | self._write_comma() | ||
1869 | self.write_string(compact or iri) | ||
1870 | self.write(":") | ||
1871 | yield self | ||
1872 | self.comma = True | ||
1873 | |||
1874 | @contextmanager | ||
1875 | def write_object(self, o, _id, needs_id): | ||
1876 | self._write_comma() | ||
1877 | |||
1878 | self.write("{") | ||
1879 | self.write_string("type") | ||
1880 | self.write(":") | ||
1881 | self.write_string(o.COMPACT_TYPE or o.TYPE) | ||
1882 | self.comma = True | ||
1883 | |||
1884 | if needs_id: | ||
1885 | self._write_comma() | ||
1886 | self.write_string(o.ID_ALIAS or "@id") | ||
1887 | self.write(":") | ||
1888 | self.write_string(_id) | ||
1889 | self.comma = True | ||
1890 | |||
1891 | self.comma = True | ||
1892 | yield self | ||
1893 | |||
1894 | self.write("}") | ||
1895 | self.comma = True | ||
1896 | |||
1897 | @contextmanager | ||
1898 | def write_list(self): | ||
1899 | self._write_comma() | ||
1900 | self.write("[") | ||
1901 | yield self.__class__(self.f, self.sha1) | ||
1902 | self.write("]") | ||
1903 | self.comma = True | ||
1904 | |||
1905 | @contextmanager | ||
1906 | def write_list_item(self): | ||
1907 | self._write_comma() | ||
1908 | yield self.__class__(self.f, self.sha1) | ||
1909 | self.comma = True | ||
1910 | |||
1911 | |||
1912 | class JSONLDInlineSerializer(object): | ||
1913 | def write( | ||
1914 | self, | ||
1915 | objectset: SHACLObjectSet, | ||
1916 | f, | ||
1917 | force_at_graph=False, | ||
1918 | ): | ||
1919 | """ | ||
1920 | Write a SHACLObjectSet to a JSON LD file | ||
1921 | |||
1922 | Note: force_at_graph is included for compatibility, but ignored. This | ||
1923 | serializer always writes out a graph | ||
1924 | """ | ||
1925 | sha1 = hashlib.sha1() | ||
1926 | h = JSONLDInlineEncoder(f, sha1) | ||
1927 | h.write('{"@context":') | ||
1928 | if len(CONTEXT_URLS) == 1: | ||
1929 | h.write(f'"{CONTEXT_URLS[0]}"') | ||
1930 | elif CONTEXT_URLS: | ||
1931 | h.write('["') | ||
1932 | h.write('","'.join(CONTEXT_URLS)) | ||
1933 | h.write('"]') | ||
1934 | h.write(",") | ||
1935 | |||
1936 | h.write('"@graph":') | ||
1937 | |||
1938 | objectset.encode(h, True) | ||
1939 | h.write("}") | ||
1940 | return sha1.hexdigest() | ||
1941 | |||
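# Editor's sketch (illustrative): same call shape as JSONLDSerializer.write,
# but the document is streamed instead of being built in memory first.
#
#   with open("doc.spdx.json", "wb") as f:
#       digest = JSONLDInlineSerializer().write(objset, f)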
1942 | |||
1943 | def print_tree(objects, all_fields=False): | ||
1944 | """ | ||
1945 | Print object tree | ||
1946 | """ | ||
1947 | seen = set() | ||
1948 | |||
1949 | def callback(value, path): | ||
1950 | nonlocal seen | ||
1951 | |||
1952 | s = (" " * (len(path) - 1)) + f"{path[-1]}" | ||
1953 | if isinstance(value, SHACLObject): | ||
1954 | s += f" {value} ({id(value)})" | ||
1955 | is_empty = False | ||
1956 | elif isinstance(value, ListProxy): | ||
1957 | is_empty = len(value) == 0 | ||
1958 | if is_empty: | ||
1959 | s += " []" | ||
1960 | else: | ||
1961 | s += f" {value!r}" | ||
1962 | is_empty = value is None | ||
1963 | |||
1964 | if all_fields or not is_empty: | ||
1965 | print(s) | ||
1966 | |||
1967 | if isinstance(value, SHACLObject): | ||
1968 | if value in seen: | ||
1969 | return False | ||
1970 | seen.add(value) | ||
1971 | return True | ||
1972 | |||
1973 | return True | ||
1974 | |||
1975 | for o in objects: | ||
1976 | o.walk(callback) | ||
1977 | |||
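# Editor's sketch (illustrative): dump every object in a set, including
# empty fields.
#
#   print_tree(objset.objects, all_fields=True)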
1978 | |||
1979 | # fmt: off | ||
1980 | """Format Guard""" | ||
1981 | |||
1982 | |||
1983 | CONTEXT_URLS = [ | ||
1984 | "https://spdx.org/rdf/3.0.1/spdx-context.jsonld", | ||
1985 | ] | ||
1986 | |||
1987 | |||
1988 | # CLASSES | ||
1989 | # A class for describing the energy consumption incurred by an AI model in | ||
1990 | # different stages of its lifecycle. | ||
1991 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False) | ||
1992 | class ai_EnergyConsumption(SHACLObject): | ||
1993 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
1994 | NAMED_INDIVIDUALS = { | ||
1995 | } | ||
1996 | |||
1997 | @classmethod | ||
1998 | def _register_props(cls): | ||
1999 | super()._register_props() | ||
2000 | # Specifies the amount of energy consumed when finetuning the AI model that is | ||
2001 | # being used in the AI system. | ||
2002 | cls._add_property( | ||
2003 | "ai_finetuningEnergyConsumption", | ||
2004 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
2005 | iri="https://spdx.org/rdf/3.0.1/terms/AI/finetuningEnergyConsumption", | ||
2006 | compact="ai_finetuningEnergyConsumption", | ||
2007 | ) | ||
2008 | # Specifies the amount of energy consumed during inference time by an AI model | ||
2009 | # that is being used in the AI system. | ||
2010 | cls._add_property( | ||
2011 | "ai_inferenceEnergyConsumption", | ||
2012 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
2013 | iri="https://spdx.org/rdf/3.0.1/terms/AI/inferenceEnergyConsumption", | ||
2014 | compact="ai_inferenceEnergyConsumption", | ||
2015 | ) | ||
2016 | # Specifies the amount of energy consumed when training the AI model that is | ||
2017 | # being used in the AI system. | ||
2018 | cls._add_property( | ||
2019 | "ai_trainingEnergyConsumption", | ||
2020 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
2021 | iri="https://spdx.org/rdf/3.0.1/terms/AI/trainingEnergyConsumption", | ||
2022 | compact="ai_trainingEnergyConsumption", | ||
2023 | ) | ||
2024 | |||
2025 | |||
2026 | # The class that helps note down the quantity of energy consumption and the unit | ||
2027 | # used for measurement. | ||
2028 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False) | ||
2029 | class ai_EnergyConsumptionDescription(SHACLObject): | ||
2030 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2031 | NAMED_INDIVIDUALS = { | ||
2032 | } | ||
2033 | |||
2034 | @classmethod | ||
2035 | def _register_props(cls): | ||
2036 | super()._register_props() | ||
2037 | # Represents the energy quantity. | ||
2038 | cls._add_property( | ||
2039 | "ai_energyQuantity", | ||
2040 | FloatProp(), | ||
2041 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyQuantity", | ||
2042 | min_count=1, | ||
2043 | compact="ai_energyQuantity", | ||
2044 | ) | ||
2045 | # Specifies the unit in which energy is measured. | ||
2046 | cls._add_property( | ||
2047 | "ai_energyUnit", | ||
2048 | EnumProp([ | ||
2049 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"), | ||
2050 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", "megajoule"), | ||
2051 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", "other"), | ||
2052 | ]), | ||
2053 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyUnit", | ||
2054 | min_count=1, | ||
2055 | compact="ai_energyUnit", | ||
2056 | ) | ||
2057 | |||
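# Editor's sketch (illustrative): both properties are mandatory
# (min_count=1) and the unit must be one of the ai_EnergyUnitType IRIs.
#
#   desc = ai_EnergyConsumptionDescription()
#   desc.ai_energyQuantity = 13.5
#   desc.ai_energyUnit = ai_EnergyUnitType.kilowattHour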
2058 | |||
2059 | # Specifies the unit of energy consumption. | ||
2060 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False) | ||
2061 | class ai_EnergyUnitType(SHACLObject): | ||
2062 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2063 | NAMED_INDIVIDUALS = { | ||
2064 | "kilowattHour": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", | ||
2065 | "megajoule": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", | ||
2066 | "other": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", | ||
2067 | } | ||
2068 | # Kilowatt-hour. | ||
2069 | kilowattHour = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour" | ||
2070 | # Megajoule. | ||
2071 | megajoule = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule" | ||
2072 | # Any other units of energy measurement. | ||
2073 | other = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other" | ||
2074 | |||
2075 | |||
2076 | # Specifies the safety risk level. | ||
2077 | @register("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False) | ||
2078 | class ai_SafetyRiskAssessmentType(SHACLObject): | ||
2079 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2080 | NAMED_INDIVIDUALS = { | ||
2081 | "high": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", | ||
2082 | "low": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", | ||
2083 | "medium": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", | ||
2084 | "serious": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", | ||
2085 | } | ||
2086 | # The second-highest level of risk posed by an AI system. | ||
2087 | high = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high" | ||
2088 | # Low/no risk is posed by an AI system. | ||
2089 | low = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low" | ||
2090 | # The third-highest level of risk posed by an AI system. | ||
2091 | medium = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium" | ||
2092 | # The highest level of risk posed by an AI system. | ||
2093 | serious = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious" | ||
2094 | |||
2095 | |||
2096 | # Specifies the type of an annotation. | ||
2097 | @register("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False) | ||
2098 | class AnnotationType(SHACLObject): | ||
2099 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2100 | NAMED_INDIVIDUALS = { | ||
2101 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", | ||
2102 | "review": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", | ||
2103 | } | ||
2104 | # Used to store extra information about an Element which is not part of a review (e.g. extra information provided during the creation of the Element). | ||
2105 | other = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other" | ||
2106 | # Used when someone reviews the Element. | ||
2107 | review = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review" | ||
2108 | |||
2109 | |||
2110 | # Provides information about the creation of the Element. | ||
2111 | @register("https://spdx.org/rdf/3.0.1/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False) | ||
2112 | class CreationInfo(SHACLObject): | ||
2113 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2114 | NAMED_INDIVIDUALS = { | ||
2115 | } | ||
2116 | |||
2117 | @classmethod | ||
2118 | def _register_props(cls): | ||
2119 | super()._register_props() | ||
2120 | # Provide consumers with comments by the creator of the Element about the | ||
2121 | # Element. | ||
2122 | cls._add_property( | ||
2123 | "comment", | ||
2124 | StringProp(), | ||
2125 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2126 | compact="comment", | ||
2127 | ) | ||
2128 | # Identifies when the Element was originally created. | ||
2129 | cls._add_property( | ||
2130 | "created", | ||
2131 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
2132 | iri="https://spdx.org/rdf/3.0.1/terms/Core/created", | ||
2133 | min_count=1, | ||
2134 | compact="created", | ||
2135 | ) | ||
2136 | # Identifies who or what created the Element. | ||
2137 | cls._add_property( | ||
2138 | "createdBy", | ||
2139 | ListProp(ObjectProp(Agent, False, context=[ | ||
2140 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
2141 | ],)), | ||
2142 | iri="https://spdx.org/rdf/3.0.1/terms/Core/createdBy", | ||
2143 | min_count=1, | ||
2144 | compact="createdBy", | ||
2145 | ) | ||
2146 | # Identifies the tooling that was used during the creation of the Element. | ||
2147 | cls._add_property( | ||
2148 | "createdUsing", | ||
2149 | ListProp(ObjectProp(Tool, False)), | ||
2150 | iri="https://spdx.org/rdf/3.0.1/terms/Core/createdUsing", | ||
2151 | compact="createdUsing", | ||
2152 | ) | ||
2153 | # Provides a reference number that can be used to understand how to parse and | ||
2154 | # interpret an Element. | ||
2155 | cls._add_property( | ||
2156 | "specVersion", | ||
2157 | StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",), | ||
2158 | iri="https://spdx.org/rdf/3.0.1/terms/Core/specVersion", | ||
2159 | min_count=1, | ||
2160 | compact="specVersion", | ||
2161 | ) | ||
2162 | |||
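# Editor's sketch (illustrative; the timestamp is an example value):
# created, createdBy, and specVersion are mandatory (min_count=1), and
# created must match the "YYYY-MM-DDThh:mm:ssZ" pattern above.
#
#   ci = CreationInfo()
#   ci.created = "2024-01-01T00:00:00Z"
#   ci.specVersion = "3.0.1"
#   ci.createdBy.append(agent)  # an Agent created elsewhere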
2163 | |||
2164 | # A key with an associated value. | ||
2165 | @register("https://spdx.org/rdf/3.0.1/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False) | ||
2166 | class DictionaryEntry(SHACLObject): | ||
2167 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2168 | NAMED_INDIVIDUALS = { | ||
2169 | } | ||
2170 | |||
2171 | @classmethod | ||
2172 | def _register_props(cls): | ||
2173 | super()._register_props() | ||
2174 | # A key used in a generic key-value pair. | ||
2175 | cls._add_property( | ||
2176 | "key", | ||
2177 | StringProp(), | ||
2178 | iri="https://spdx.org/rdf/3.0.1/terms/Core/key", | ||
2179 | min_count=1, | ||
2180 | compact="key", | ||
2181 | ) | ||
2182 | # A value used in a generic key-value pair. | ||
2183 | cls._add_property( | ||
2184 | "value", | ||
2185 | StringProp(), | ||
2186 | iri="https://spdx.org/rdf/3.0.1/terms/Core/value", | ||
2187 | compact="value", | ||
2188 | ) | ||
2189 | |||
2190 | |||
2191 | # Base domain class from which all other SPDX-3.0 domain classes derive. | ||
2192 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Element", compact_type="Element", abstract=True) | ||
2193 | class Element(SHACLObject): | ||
2194 | NODE_KIND = NodeKind.IRI | ||
2195 | ID_ALIAS = "spdxId" | ||
2196 | NAMED_INDIVIDUALS = { | ||
2197 | } | ||
2198 | |||
2199 | @classmethod | ||
2200 | def _register_props(cls): | ||
2201 | super()._register_props() | ||
2202 | # Provide consumers with comments by the creator of the Element about the | ||
2203 | # Element. | ||
2204 | cls._add_property( | ||
2205 | "comment", | ||
2206 | StringProp(), | ||
2207 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2208 | compact="comment", | ||
2209 | ) | ||
2210 | # Provides information about the creation of the Element. | ||
2211 | cls._add_property( | ||
2212 | "creationInfo", | ||
2213 | ObjectProp(CreationInfo, True), | ||
2214 | iri="https://spdx.org/rdf/3.0.1/terms/Core/creationInfo", | ||
2215 | min_count=1, | ||
2216 | compact="creationInfo", | ||
2217 | ) | ||
2218 | # Provides a detailed description of the Element. | ||
2219 | cls._add_property( | ||
2220 | "description", | ||
2221 | StringProp(), | ||
2222 | iri="https://spdx.org/rdf/3.0.1/terms/Core/description", | ||
2223 | compact="description", | ||
2224 | ) | ||
2225 | # Specifies an Extension characterization of some aspect of an Element. | ||
2226 | cls._add_property( | ||
2227 | "extension", | ||
2228 | ListProp(ObjectProp(extension_Extension, False)), | ||
2229 | iri="https://spdx.org/rdf/3.0.1/terms/Core/extension", | ||
2230 | compact="extension", | ||
2231 | ) | ||
2232 | # Provides a reference to a resource outside the scope of SPDX-3.0 content | ||
2233 | # that uniquely identifies an Element. | ||
2234 | cls._add_property( | ||
2235 | "externalIdentifier", | ||
2236 | ListProp(ObjectProp(ExternalIdentifier, False)), | ||
2237 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifier", | ||
2238 | compact="externalIdentifier", | ||
2239 | ) | ||
2240 | # Points to a resource outside the scope of the SPDX-3.0 content | ||
2241 | # that provides additional characteristics of an Element. | ||
2242 | cls._add_property( | ||
2243 | "externalRef", | ||
2244 | ListProp(ObjectProp(ExternalRef, False)), | ||
2245 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRef", | ||
2246 | compact="externalRef", | ||
2247 | ) | ||
2248 | # Identifies the name of an Element as designated by the creator. | ||
2249 | cls._add_property( | ||
2250 | "name", | ||
2251 | StringProp(), | ||
2252 | iri="https://spdx.org/rdf/3.0.1/terms/Core/name", | ||
2253 | compact="name", | ||
2254 | ) | ||
2255 | # A short description of an Element. | ||
2256 | cls._add_property( | ||
2257 | "summary", | ||
2258 | StringProp(), | ||
2259 | iri="https://spdx.org/rdf/3.0.1/terms/Core/summary", | ||
2260 | compact="summary", | ||
2261 | ) | ||
2262 | # Provides an IntegrityMethod with which the integrity of an Element can be | ||
2263 | # asserted. | ||
2264 | cls._add_property( | ||
2265 | "verifiedUsing", | ||
2266 | ListProp(ObjectProp(IntegrityMethod, False)), | ||
2267 | iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing", | ||
2268 | compact="verifiedUsing", | ||
2269 | ) | ||
2270 | |||
2271 | |||
2272 | # A collection of Elements, not necessarily with unifying context. | ||
2273 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True) | ||
2274 | class ElementCollection(Element): | ||
2275 | NODE_KIND = NodeKind.IRI | ||
2276 | ID_ALIAS = "spdxId" | ||
2277 | NAMED_INDIVIDUALS = { | ||
2278 | } | ||
2279 | |||
2280 | @classmethod | ||
2281 | def _register_props(cls): | ||
2282 | super()._register_props() | ||
2283 | # Refers to one or more Elements that are part of an ElementCollection. | ||
2284 | cls._add_property( | ||
2285 | "element", | ||
2286 | ListProp(ObjectProp(Element, False, context=[ | ||
2287 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
2288 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
2289 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
2290 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
2291 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
2292 | ],)), | ||
2293 | iri="https://spdx.org/rdf/3.0.1/terms/Core/element", | ||
2294 | compact="element", | ||
2295 | ) | ||
2296 | # Describes a profile which the creator of this ElementCollection intends to | ||
2297 | # conform to. | ||
2298 | cls._add_property( | ||
2299 | "profileConformance", | ||
2300 | ListProp(EnumProp([ | ||
2301 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", "ai"), | ||
2302 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", "build"), | ||
2303 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", "core"), | ||
2304 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", "dataset"), | ||
2305 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"), | ||
2306 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", "extension"), | ||
2307 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", "lite"), | ||
2308 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", "security"), | ||
2309 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"), | ||
2310 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", "software"), | ||
2311 | ])), | ||
2312 | iri="https://spdx.org/rdf/3.0.1/terms/Core/profileConformance", | ||
2313 | compact="profileConformance", | ||
2314 | ) | ||
2315 | # This property is used to denote the root Element(s) of a tree of elements contained in a BOM. | ||
2316 | cls._add_property( | ||
2317 | "rootElement", | ||
2318 | ListProp(ObjectProp(Element, False, context=[ | ||
2319 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
2320 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
2321 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
2322 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
2323 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
2324 | ],)), | ||
2325 | iri="https://spdx.org/rdf/3.0.1/terms/Core/rootElement", | ||
2326 | compact="rootElement", | ||
2327 | ) | ||
2328 | |||
2329 | |||
2330 | # A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element. | ||
2331 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False) | ||
2332 | class ExternalIdentifier(SHACLObject): | ||
2333 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2334 | NAMED_INDIVIDUALS = { | ||
2335 | } | ||
2336 | |||
2337 | @classmethod | ||
2338 | def _register_props(cls): | ||
2339 | super()._register_props() | ||
2340 | # Provide consumers with comments by the creator of the Element about the | ||
2341 | # Element. | ||
2342 | cls._add_property( | ||
2343 | "comment", | ||
2344 | StringProp(), | ||
2345 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2346 | compact="comment", | ||
2347 | ) | ||
2348 | # Specifies the type of the external identifier. | ||
2349 | cls._add_property( | ||
2350 | "externalIdentifierType", | ||
2351 | EnumProp([ | ||
2352 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", "cpe22"), | ||
2353 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", "cpe23"), | ||
2354 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", "cve"), | ||
2355 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", "email"), | ||
2356 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", "gitoid"), | ||
2357 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", "other"), | ||
2358 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"), | ||
2359 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", "securityOther"), | ||
2360 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", "swhid"), | ||
2361 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", "swid"), | ||
2362 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"), | ||
2363 | ]), | ||
2364 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifierType", | ||
2365 | min_count=1, | ||
2366 | compact="externalIdentifierType", | ||
2367 | ) | ||
2368 | # Uniquely identifies an external element. | ||
2369 | cls._add_property( | ||
2370 | "identifier", | ||
2371 | StringProp(), | ||
2372 | iri="https://spdx.org/rdf/3.0.1/terms/Core/identifier", | ||
2373 | min_count=1, | ||
2374 | compact="identifier", | ||
2375 | ) | ||
2376 | # Provides the location for more information regarding an external identifier. | ||
2377 | cls._add_property( | ||
2378 | "identifierLocator", | ||
2379 | ListProp(AnyURIProp()), | ||
2380 | iri="https://spdx.org/rdf/3.0.1/terms/Core/identifierLocator", | ||
2381 | compact="identifierLocator", | ||
2382 | ) | ||
2383 | # An entity that is authorized to issue identification credentials. | ||
2384 | cls._add_property( | ||
2385 | "issuingAuthority", | ||
2386 | StringProp(), | ||
2387 | iri="https://spdx.org/rdf/3.0.1/terms/Core/issuingAuthority", | ||
2388 | compact="issuingAuthority", | ||
2389 | ) | ||
2390 | |||
2391 | |||
2392 | # Specifies the type of an external identifier. | ||
2393 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False) | ||
2394 | class ExternalIdentifierType(SHACLObject): | ||
2395 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2396 | NAMED_INDIVIDUALS = { | ||
2397 | "cpe22": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", | ||
2398 | "cpe23": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", | ||
2399 | "cve": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", | ||
2400 | "email": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", | ||
2401 | "gitoid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", | ||
2402 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", | ||
2403 | "packageUrl": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", | ||
2404 | "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", | ||
2405 | "swhid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", | ||
2406 | "swid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", | ||
2407 | "urlScheme": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", | ||
2408 | } | ||
2409 | # [Common Platform Enumeration Specification 2.2](https://cpe.mitre.org/files/cpe-specification_2.2.pdf) | ||
2410 | cpe22 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22" | ||
2411 | # [Common Platform Enumeration: Naming Specification Version 2.3](https://csrc.nist.gov/publications/detail/nistir/7695/final) | ||
2412 | cpe23 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23" | ||
2413 | # Common Vulnerabilities and Exposures identifiers, an identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the [CVE specification](https://csrc.nist.gov/glossary/term/cve_id). | ||
2414 | cve = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve" | ||
2415 | # Email address, as defined in [RFC 3696](https://datatracker.ietf.org/doc/rfc3986/) Section 3. | ||
2416 | email = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email" | ||
2417 | # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg). | ||
2418 | gitoid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid" | ||
2419 | # Used when the type does not match any of the other options. | ||
2420 | other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other" | ||
2421 | # Package URL, as defined in the corresponding [Annex](../../../annexes/pkg-url-specification.md) of this specification. | ||
2422 | packageUrl = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl" | ||
2423 | # Used when there is a security related identifier of unspecified type. | ||
2424 | securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther" | ||
2425 | # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`. | ||
2426 | swhid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid" | ||
2427 | # Concise Software Identification (CoSWID) tag, as defined in [RFC 9393](https://datatracker.ietf.org/doc/rfc9393/) Section 2.3. | ||
2428 | swid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid" | ||
2429 | # [Uniform Resource Identifier (URI) Schemes](https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml). The scheme used to locate a resource. | ||
2430 | urlScheme = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme" | ||
2431 | |||
2432 | |||
2433 | # A map of Element identifiers that are used within an SpdxDocument but defined | ||
2434 | # external to that SpdxDocument. | ||
2435 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False) | ||
2436 | class ExternalMap(SHACLObject): | ||
2437 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2438 | NAMED_INDIVIDUALS = { | ||
2439 | } | ||
2440 | |||
2441 | @classmethod | ||
2442 | def _register_props(cls): | ||
2443 | super()._register_props() | ||
2444 | # Artifact representing a serialization instance of SPDX data containing the | ||
2445 | # definition of a particular Element. | ||
2446 | cls._add_property( | ||
2447 | "definingArtifact", | ||
2448 | ObjectProp(Artifact, False), | ||
2449 | iri="https://spdx.org/rdf/3.0.1/terms/Core/definingArtifact", | ||
2450 | compact="definingArtifact", | ||
2451 | ) | ||
2452 | # Identifies an external Element used within an SpdxDocument but defined | ||
2453 | # external to that SpdxDocument. | ||
2454 | cls._add_property( | ||
2455 | "externalSpdxId", | ||
2456 | AnyURIProp(), | ||
2457 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalSpdxId", | ||
2458 | min_count=1, | ||
2459 | compact="externalSpdxId", | ||
2460 | ) | ||
2461 | # Provides an indication of where to retrieve an external Element. | ||
2462 | cls._add_property( | ||
2463 | "locationHint", | ||
2464 | AnyURIProp(), | ||
2465 | iri="https://spdx.org/rdf/3.0.1/terms/Core/locationHint", | ||
2466 | compact="locationHint", | ||
2467 | ) | ||
2468 | # Provides an IntegrityMethod with which the integrity of an Element can be | ||
2469 | # asserted. | ||
2470 | cls._add_property( | ||
2471 | "verifiedUsing", | ||
2472 | ListProp(ObjectProp(IntegrityMethod, False)), | ||
2473 | iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing", | ||
2474 | compact="verifiedUsing", | ||
2475 | ) | ||
2476 | |||
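A minimal usage sketch for the ExternalMap binding above, assuming (as in typical shacl2code-generated bindings) that each property registered via _add_property() is readable and writable as an ordinary Python attribute; all IRIs below are hypothetical:

    emap = ExternalMap()
    emap.externalSpdxId = "https://example.com/other-doc#SPDXRef-pkg"  # required (min_count=1)
    emap.locationHint = "https://example.com/other-doc.spdx.json"      # optional retrieval hint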
2477 | |||
2478 | # A reference to a resource outside the scope of SPDX-3.0 content related to an Element. | ||
2479 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False) | ||
2480 | class ExternalRef(SHACLObject): | ||
2481 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2482 | NAMED_INDIVIDUALS = { | ||
2483 | } | ||
2484 | |||
2485 | @classmethod | ||
2486 | def _register_props(cls): | ||
2487 | super()._register_props() | ||
2488 | # Provide consumers with comments by the creator of the Element about the | ||
2489 | # Element. | ||
2490 | cls._add_property( | ||
2491 | "comment", | ||
2492 | StringProp(), | ||
2493 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2494 | compact="comment", | ||
2495 | ) | ||
2496 | # Provides information about the content type of an Element or a Property. | ||
2497 | cls._add_property( | ||
2498 | "contentType", | ||
2499 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
2500 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
2501 | compact="contentType", | ||
2502 | ) | ||
2503 | # Specifies the type of the external reference. | ||
2504 | cls._add_property( | ||
2505 | "externalRefType", | ||
2506 | EnumProp([ | ||
2507 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"), | ||
2508 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", "altWebPage"), | ||
2509 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"), | ||
2510 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", "bower"), | ||
2511 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", "buildMeta"), | ||
2512 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", "buildSystem"), | ||
2513 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", "certificationReport"), | ||
2514 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", "chat"), | ||
2515 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"), | ||
2516 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", "cwe"), | ||
2517 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", "documentation"), | ||
2518 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"), | ||
2519 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", "eolNotice"), | ||
2520 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"), | ||
2521 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", "funding"), | ||
2522 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", "issueTracker"), | ||
2523 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", "license"), | ||
2524 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", "mailingList"), | ||
2525 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"), | ||
2526 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", "metrics"), | ||
2527 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", "npm"), | ||
2528 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", "nuget"), | ||
2529 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", "other"), | ||
2530 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"), | ||
2531 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", "productMetadata"), | ||
2532 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"), | ||
2533 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"), | ||
2534 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"), | ||
2535 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"), | ||
2536 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"), | ||
2537 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"), | ||
2538 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"), | ||
2539 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"), | ||
2540 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"), | ||
2541 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", "securityFix"), | ||
2542 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", "securityOther"), | ||
2543 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"), | ||
2544 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"), | ||
2545 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"), | ||
2546 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", "socialMedia"), | ||
2547 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"), | ||
2548 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"), | ||
2549 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", "support"), | ||
2550 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", "vcs"), | ||
2551 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"), | ||
2552 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"), | ||
2553 | ]), | ||
2554 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRefType", | ||
2555 | compact="externalRefType", | ||
2556 | ) | ||
2557 | # Provides the location of an external reference. | ||
2558 | cls._add_property( | ||
2559 | "locator", | ||
2560 | ListProp(StringProp()), | ||
2561 | iri="https://spdx.org/rdf/3.0.1/terms/Core/locator", | ||
2562 | compact="locator", | ||
2563 | ) | ||
2564 | |||
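A sketch of building an ExternalRef that points at a version control system, assuming ListProp-backed properties start out as empty mutable lists; the ExternalRefType IRI constants used here are defined by the class that follows:

    ref = ExternalRef()
    ref.externalRefType = ExternalRefType.vcs                   # plain IRI string, checked by the EnumProp
    ref.locator.append("https://git.example.com/project.git")   # hypothetical repository URL
    ref.contentType = "text/html"                               # must match the media-type pattern ^[^/]+/[^/]+$
    ref.comment = "Upstream source repository"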
2565 | |||
2566 | # Specifies the type of an external reference. | ||
2567 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False) | ||
2568 | class ExternalRefType(SHACLObject): | ||
2569 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2570 | NAMED_INDIVIDUALS = { | ||
2571 | "altDownloadLocation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", | ||
2572 | "altWebPage": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", | ||
2573 | "binaryArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", | ||
2574 | "bower": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", | ||
2575 | "buildMeta": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", | ||
2576 | "buildSystem": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", | ||
2577 | "certificationReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", | ||
2578 | "chat": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", | ||
2579 | "componentAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", | ||
2580 | "cwe": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", | ||
2581 | "documentation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", | ||
2582 | "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", | ||
2583 | "eolNotice": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", | ||
2584 | "exportControlAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", | ||
2585 | "funding": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", | ||
2586 | "issueTracker": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", | ||
2587 | "license": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", | ||
2588 | "mailingList": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", | ||
2589 | "mavenCentral": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", | ||
2590 | "metrics": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", | ||
2591 | "npm": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", | ||
2592 | "nuget": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", | ||
2593 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", | ||
2594 | "privacyAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", | ||
2595 | "productMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", | ||
2596 | "purchaseOrder": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", | ||
2597 | "qualityAssessmentReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", | ||
2598 | "releaseHistory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", | ||
2599 | "releaseNotes": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", | ||
2600 | "riskAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", | ||
2601 | "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", | ||
2602 | "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", | ||
2603 | "securityAdversaryModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", | ||
2604 | "securityAdvisory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", | ||
2605 | "securityFix": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", | ||
2606 | "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", | ||
2607 | "securityPenTestReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", | ||
2608 | "securityPolicy": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", | ||
2609 | "securityThreatModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", | ||
2610 | "socialMedia": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", | ||
2611 | "sourceArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", | ||
2612 | "staticAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", | ||
2613 | "support": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", | ||
2614 | "vcs": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", | ||
2615 | "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", | ||
2616 | "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", | ||
2617 | } | ||
2618 | # A reference to an alternative download location. | ||
2619 | altDownloadLocation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation" | ||
2620 | # A reference to an alternative web page. | ||
2621 | altWebPage = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage" | ||
2622 | # A reference to binary artifacts related to a package. | ||
2623 | binaryArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact" | ||
2624 | # A reference to a Bower package. The package locator format, which looks like `package#version`, is defined in the "install" section of the [Bower API documentation](https://bower.io/docs/api/#install). | ||
2625 | bower = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower" | ||
2626 | # A reference to build metadata related to a published package. | ||
2627 | buildMeta = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta" | ||
2628 | # A reference to the build system used to create or publish the package. | ||
2629 | buildSystem = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem" | ||
2630 | # A reference to a certification report for a package from an accredited/independent body. | ||
2631 | certificationReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport" | ||
2632 | # A reference to the instant messaging system used by the maintainer for a package. | ||
2633 | chat = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat" | ||
2634 | # A reference to a Software Composition Analysis (SCA) report. | ||
2635 | componentAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport" | ||
2636 | # [Common Weakness Enumeration](https://csrc.nist.gov/glossary/term/common_weakness_enumeration). A reference to a source of a software flaw defined within the official [CWE List](https://cwe.mitre.org/data/) that conforms to the [CWE specification](https://cwe.mitre.org/). | ||
2637 | cwe = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe" | ||
2638 | # A reference to the documentation for a package. | ||
2639 | documentation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation" | ||
2640 | # A reference to a dynamic analysis report for a package. | ||
2641 | dynamicAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport" | ||
2642 | # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package. | ||
2643 | eolNotice = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice" | ||
2644 | # A reference to an export control assessment for a package. | ||
2645 | exportControlAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment" | ||
2646 | # A reference to funding information related to a package. | ||
2647 | funding = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding" | ||
2648 | # A reference to the issue tracker for a package. | ||
2649 | issueTracker = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker" | ||
2650 | # A reference to additional license information related to an artifact. | ||
2651 | license = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license" | ||
2652 | # A reference to the mailing list used by the maintainer for a package. | ||
2653 | mailingList = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList" | ||
2654 | # A reference to a Maven repository artifact. The artifact locator format is defined in the [Maven documentation](https://maven.apache.org/guides/mini/guide-naming-conventions.html) and looks like `groupId:artifactId[:version]`. | ||
2655 | mavenCentral = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral" | ||
2656 | # A reference to metrics related to a package, such as OpenSSF scorecards. | ||
2657 | metrics = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics" | ||
2658 | # A reference to an npm package. The package locator format is defined in the [npm documentation](https://docs.npmjs.com/cli/v10/configuring-npm/package-json) and looks like `package@version`. | ||
2659 | npm = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm" | ||
2660 | # A reference to a NuGet package. The package locator format is defined in the [NuGet documentation](https://docs.nuget.org) and looks like `package/version`. | ||
2661 | nuget = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget" | ||
2662 | # Used when the type does not match any of the other options. | ||
2663 | other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other" | ||
2664 | # A reference to a privacy assessment for a package. | ||
2665 | privacyAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment" | ||
2666 | # A reference to additional product metadata, such as a reference within an organization's product catalog. | ||
2667 | productMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata" | ||
2668 | # A reference to a purchase order for a package. | ||
2669 | purchaseOrder = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder" | ||
2670 | # A reference to a quality assessment for a package. | ||
2671 | qualityAssessmentReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport" | ||
2672 | # A reference to a published list of releases for a package. | ||
2673 | releaseHistory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory" | ||
2674 | # A reference to the release notes for a package. | ||
2675 | releaseNotes = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes" | ||
2676 | # A reference to a risk assessment for a package. | ||
2677 | riskAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment" | ||
2678 | # A reference to a runtime analysis report for a package. | ||
2679 | runtimeAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport" | ||
2680 | # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form). | ||
2681 | secureSoftwareAttestation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation" | ||
2682 | # A reference to the security adversary model for a package. | ||
2683 | securityAdversaryModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel" | ||
2684 | # A reference to a published security advisory (where an advisory is defined per [ISO 29147:2018](https://www.iso.org/standard/72311.html)) that may affect one or more elements, e.g., vendor advisories or specific NVD entries. | ||
2685 | securityAdvisory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory" | ||
2686 | # A reference to the patch or source code that fixes a vulnerability. | ||
2687 | securityFix = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix" | ||
2688 | # A reference to related security information of unspecified type. | ||
2689 | securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther" | ||
2690 | # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package. | ||
2691 | securityPenTestReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport" | ||
2692 | # A reference to instructions for reporting newly discovered security vulnerabilities for a package. | ||
2693 | securityPolicy = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy" | ||
2694 | # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package. | ||
2695 | securityThreatModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel" | ||
2696 | # A reference to a social media channel for a package. | ||
2697 | socialMedia = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia" | ||
2698 | # A reference to an artifact containing the sources for a package. | ||
2699 | sourceArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact" | ||
2700 | # A reference to a static analysis report for a package. | ||
2701 | staticAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport" | ||
2702 | # A reference to the software support channel or other support information for a package. | ||
2703 | support = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support" | ||
2704 | # A reference to a version control system related to a software artifact. | ||
2705 | vcs = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs" | ||
2706 | # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161 Cybersecurity Supply Chain Risk Management Practices for Systems and Organizations](https://csrc.nist.gov/pubs/sp/800/161/r1/final). | ||
2707 | vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport" | ||
2708 | # A reference to a Vulnerability Exploitability eXchange (VEX) statement, which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate it. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf). | ||
2709 | vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment" | ||
2710 | |||
2711 | |||
2712 | # A mathematical algorithm that maps data of arbitrary size to a bit string. | ||
2713 | @register("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False) | ||
2714 | class HashAlgorithm(SHACLObject): | ||
2715 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2716 | NAMED_INDIVIDUALS = { | ||
2717 | "adler32": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", | ||
2718 | "blake2b256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", | ||
2719 | "blake2b384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", | ||
2720 | "blake2b512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", | ||
2721 | "blake3": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", | ||
2722 | "crystalsDilithium": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", | ||
2723 | "crystalsKyber": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", | ||
2724 | "falcon": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", | ||
2725 | "md2": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", | ||
2726 | "md4": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", | ||
2727 | "md5": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", | ||
2728 | "md6": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", | ||
2729 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", | ||
2730 | "sha1": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", | ||
2731 | "sha224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", | ||
2732 | "sha256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", | ||
2733 | "sha384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", | ||
2734 | "sha3_224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", | ||
2735 | "sha3_256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", | ||
2736 | "sha3_384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", | ||
2737 | "sha3_512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", | ||
2738 | "sha512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", | ||
2739 | } | ||
2740 | # Adler-32 checksum is part of the widely used zlib compression library as defined in [RFC 1950](https://datatracker.ietf.org/doc/rfc1950/) Section 2.3. | ||
2741 | adler32 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32" | ||
2742 | # BLAKE2b algorithm with a digest size of 256, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
2743 | blake2b256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256" | ||
2744 | # BLAKE2b algorithm with a digest size of 384, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
2745 | blake2b384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384" | ||
2746 | # BLAKE2b algorithm with a digest size of 512, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
2747 | blake2b512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512" | ||
2748 | # [BLAKE3](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf) | ||
2749 | blake3 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3" | ||
2750 | # [Dilithium](https://pq-crystals.org/dilithium/) | ||
2751 | crystalsDilithium = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium" | ||
2752 | # [Kyber](https://pq-crystals.org/kyber/) | ||
2753 | crystalsKyber = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber" | ||
2754 | # [FALCON](https://falcon-sign.info/falcon.pdf) | ||
2755 | falcon = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon" | ||
2756 | # MD2 message-digest algorithm, as defined in [RFC 1319](https://datatracker.ietf.org/doc/rfc1319/). | ||
2757 | md2 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2" | ||
2758 | # MD4 message-digest algorithm, as defined in [RFC 1186](https://datatracker.ietf.org/doc/rfc1186/). | ||
2759 | md4 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4" | ||
2760 | # MD5 message-digest algorithm, as defined in [RFC 1321](https://datatracker.ietf.org/doc/rfc1321/). | ||
2761 | md5 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5" | ||
2762 | # [MD6 hash function](https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf) | ||
2763 | md6 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6" | ||
2764 | # Any hashing algorithm that does not exist in this list of entries. | ||
2765 | other = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other" | ||
2766 | # SHA-1, a secure hashing algorithm, as defined in [RFC 3174](https://datatracker.ietf.org/doc/rfc3174/). | ||
2767 | sha1 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1" | ||
2768 | # SHA-2 with a digest length of 224, as defined in [RFC 3874](https://datatracker.ietf.org/doc/rfc3874/). | ||
2769 | sha224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224" | ||
2770 | # SHA-2 with a digest length of 256, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
2771 | sha256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256" | ||
2772 | # SHA-2 with a digest length of 384, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
2773 | sha384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384" | ||
2774 | # SHA-3 with a digest length of 224, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2775 | sha3_224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224" | ||
2776 | # SHA-3 with a digest length of 256, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2777 | sha3_256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256" | ||
2778 | # SHA-3 with a digest length of 384, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2779 | sha3_384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384" | ||
2780 | # SHA-3 with a digest length of 512, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2781 | sha3_512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512" | ||
2782 | # SHA-2 with a digest length of 512, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
2783 | sha512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512" | ||
2784 | |||
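Because each constant above is a plain IRI string, pairing a digest computed with Python's standard hashlib module with the matching algorithm IRI is direct; a small sketch:

    import hashlib

    data = b"example content"
    digest = hashlib.sha256(data).hexdigest()
    algorithm = HashAlgorithm.sha256
    # Sanity check: the constant is one of the registered named individuals.
    assert algorithm in HashAlgorithm.NAMED_INDIVIDUALS.values()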
2785 | |||
2786 | # A concrete subclass of Element used by Individuals in the | ||
2787 | # Core profile. | ||
2788 | @register("https://spdx.org/rdf/3.0.1/terms/Core/IndividualElement", compact_type="IndividualElement", abstract=False) | ||
2789 | class IndividualElement(Element): | ||
2790 | NODE_KIND = NodeKind.IRI | ||
2791 | ID_ALIAS = "spdxId" | ||
2792 | NAMED_INDIVIDUALS = { | ||
2793 | "NoAssertionElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", | ||
2794 | "NoneElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", | ||
2795 | } | ||
2796 | # An Individual Value for Element representing a set of Elements of unknown | ||
2797 | # identity or cardinality (number). | ||
2798 | NoAssertionElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement" | ||
2799 | # An Individual Value for Element representing a set of Elements with | ||
2800 | # cardinality (number/count) of zero. | ||
2801 | NoneElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement" | ||
2802 | |||
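These two individuals are mainly useful as relationship targets: pointing a dependsOn Relationship's "to" list at NoneElement states that an element is known to depend on nothing, while NoAssertionElement leaves the question open. A sketch, assuming the ObjectProp context aliases defined elsewhere in this file accept the raw IRI:

    no_deps = IndividualElement.NoneElement         # "depends on nothing"
    unknown = IndividualElement.NoAssertionElement  # "no assertion made"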
2803 | |||
2804 | # Provides an independently reproducible mechanism that permits verification of a specific Element. | ||
2805 | @register("https://spdx.org/rdf/3.0.1/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True) | ||
2806 | class IntegrityMethod(SHACLObject): | ||
2807 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2808 | NAMED_INDIVIDUALS = { | ||
2809 | } | ||
2810 | |||
2811 | @classmethod | ||
2812 | def _register_props(cls): | ||
2813 | super()._register_props() | ||
2814 | # Provide consumers with comments by the creator of the Element about the | ||
2815 | # Element. | ||
2816 | cls._add_property( | ||
2817 | "comment", | ||
2818 | StringProp(), | ||
2819 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2820 | compact="comment", | ||
2821 | ) | ||
2822 | |||
2823 | |||
2824 | # Provides an enumerated set of lifecycle phases that can give context to relationships. | ||
2825 | @register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False) | ||
2826 | class LifecycleScopeType(SHACLObject): | ||
2827 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2828 | NAMED_INDIVIDUALS = { | ||
2829 | "build": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", | ||
2830 | "design": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", | ||
2831 | "development": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", | ||
2832 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", | ||
2833 | "runtime": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", | ||
2834 | "test": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", | ||
2835 | } | ||
2836 | # A relationship has specific context implications during an element's build phase (during development). | ||
2837 | build = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build" | ||
2838 | # A relationship has specific context implications during an element's design. | ||
2839 | design = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design" | ||
2840 | # A relationship has specific context implications during development phase of an element. | ||
2841 | development = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development" | ||
2842 | # A relationship has other specific context information that the above set of enumerations does not capture. | ||
2843 | other = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other" | ||
2844 | # A relationship has specific context implications during the execution phase of an element. | ||
2845 | runtime = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime" | ||
2846 | # A relationship has specific context implications during an element's testing phase (during development). | ||
2847 | test = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test" | ||
2848 | |||
2849 | |||
2850 | # A mapping between prefixes and namespace partial URIs. | ||
2851 | @register("https://spdx.org/rdf/3.0.1/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False) | ||
2852 | class NamespaceMap(SHACLObject): | ||
2853 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2854 | NAMED_INDIVIDUALS = { | ||
2855 | } | ||
2856 | |||
2857 | @classmethod | ||
2858 | def _register_props(cls): | ||
2859 | super()._register_props() | ||
2860 | # Provides an unambiguous mechanism for conveying a URI fragment portion of an | ||
2861 | # Element ID. | ||
2862 | cls._add_property( | ||
2863 | "namespace", | ||
2864 | AnyURIProp(), | ||
2865 | iri="https://spdx.org/rdf/3.0.1/terms/Core/namespace", | ||
2866 | min_count=1, | ||
2867 | compact="namespace", | ||
2868 | ) | ||
2869 | # A substitute for a URI. | ||
2870 | cls._add_property( | ||
2871 | "prefix", | ||
2872 | StringProp(), | ||
2873 | iri="https://spdx.org/rdf/3.0.1/terms/Core/prefix", | ||
2874 | min_count=1, | ||
2875 | compact="prefix", | ||
2876 | ) | ||
2877 | |||
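A sketch showing how a NamespaceMap pairs a short prefix with a namespace IRI so that prefixed element IDs can be expanded; both properties are required (min_count=1) and the values below are hypothetical:

    nsmap = NamespaceMap()
    nsmap.prefix = "oe"
    nsmap.namespace = "https://example.com/spdxdocs/"
    # With this map in scope, "oe:package-foo" expands to
    # "https://example.com/spdxdocs/package-foo".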
2878 | |||
2879 | # An SPDX version 2.X compatible verification method for software packages. | ||
2880 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False) | ||
2881 | class PackageVerificationCode(IntegrityMethod): | ||
2882 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2883 | NAMED_INDIVIDUALS = { | ||
2884 | } | ||
2885 | |||
2886 | @classmethod | ||
2887 | def _register_props(cls): | ||
2888 | super()._register_props() | ||
2889 | # Specifies the algorithm used for calculating the hash value. | ||
2890 | cls._add_property( | ||
2891 | "algorithm", | ||
2892 | EnumProp([ | ||
2893 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"), | ||
2894 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"), | ||
2895 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"), | ||
2896 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"), | ||
2897 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"), | ||
2898 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"), | ||
2899 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"), | ||
2900 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"), | ||
2901 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"), | ||
2902 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"), | ||
2903 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"), | ||
2904 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"), | ||
2905 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"), | ||
2906 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"), | ||
2907 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"), | ||
2908 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"), | ||
2909 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"), | ||
2910 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"), | ||
2911 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"), | ||
2912 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"), | ||
2913 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"), | ||
2914 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"), | ||
2915 | ]), | ||
2916 | iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm", | ||
2917 | min_count=1, | ||
2918 | compact="algorithm", | ||
2919 | ) | ||
2920 | # The result of applying a hash algorithm to an Element. | ||
2921 | cls._add_property( | ||
2922 | "hashValue", | ||
2923 | StringProp(), | ||
2924 | iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue", | ||
2925 | min_count=1, | ||
2926 | compact="hashValue", | ||
2927 | ) | ||
2928 | # The relative file name of a file to be excluded from the | ||
2929 | # `PackageVerificationCode`. | ||
2930 | cls._add_property( | ||
2931 | "packageVerificationCodeExcludedFile", | ||
2932 | ListProp(StringProp()), | ||
2933 | iri="https://spdx.org/rdf/3.0.1/terms/Core/packageVerificationCodeExcludedFile", | ||
2934 | compact="packageVerificationCodeExcludedFile", | ||
2935 | ) | ||
2936 | |||
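For reference, SPDX 2.x defines the verification code as the SHA-1 of the concatenation of the sorted per-file SHA-1 hex digests, with excluded files omitted; a sketch populating the class above under that assumption:

    import hashlib

    def verification_code(file_sha1_hexdigests):
        # SPDX 2.x algorithm: sort the per-file SHA-1 hex digests,
        # concatenate them, and take the SHA-1 of the result.
        combined = "".join(sorted(file_sha1_hexdigests)).encode("ascii")
        return hashlib.sha1(combined).hexdigest()

    pvc = PackageVerificationCode()
    pvc.algorithm = HashAlgorithm.sha1
    pvc.hashValue = verification_code(["da39a3ee5e6b4b0d3255bfef95601890afd80709"])
    pvc.packageVerificationCodeExcludedFile.append("./package.spdx.json")  # hypothetical excluded file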
2937 | |||
2938 | # A tuple of two positive integers that define a range. | ||
2939 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False) | ||
2940 | class PositiveIntegerRange(SHACLObject): | ||
2941 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2942 | NAMED_INDIVIDUALS = { | ||
2943 | } | ||
2944 | |||
2945 | @classmethod | ||
2946 | def _register_props(cls): | ||
2947 | super()._register_props() | ||
2948 | # Defines the beginning of a range. | ||
2949 | cls._add_property( | ||
2950 | "beginIntegerRange", | ||
2951 | PositiveIntegerProp(), | ||
2952 | iri="https://spdx.org/rdf/3.0.1/terms/Core/beginIntegerRange", | ||
2953 | min_count=1, | ||
2954 | compact="beginIntegerRange", | ||
2955 | ) | ||
2956 | # Defines the end of a range. | ||
2957 | cls._add_property( | ||
2958 | "endIntegerRange", | ||
2959 | PositiveIntegerProp(), | ||
2960 | iri="https://spdx.org/rdf/3.0.1/terms/Core/endIntegerRange", | ||
2961 | min_count=1, | ||
2962 | compact="endIntegerRange", | ||
2963 | ) | ||
2964 | |||
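Both ends of the range are required (min_count=1) and, per the class comment, must be positive; a short sketch:

    rng = PositiveIntegerRange()
    rng.beginIntegerRange = 1     # both ends are positive integers
    rng.endIntegerRange = 120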
2965 | |||
2966 | # Categories of presence or absence. | ||
2967 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType", compact_type="PresenceType", abstract=False) | ||
2968 | class PresenceType(SHACLObject): | ||
2969 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2970 | NAMED_INDIVIDUALS = { | ||
2971 | "no": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", | ||
2972 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", | ||
2973 | "yes": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", | ||
2974 | } | ||
2975 | # Indicates absence of the field. | ||
2976 | no = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no" | ||
2977 | # Makes no assertion about the field. | ||
2978 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion" | ||
2979 | # Indicates presence of the field. | ||
2980 | yes = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes" | ||
2981 | |||
2982 | |||
2983 | # Enumeration of the valid profiles. | ||
2984 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False) | ||
2985 | class ProfileIdentifierType(SHACLObject): | ||
2986 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2987 | NAMED_INDIVIDUALS = { | ||
2988 | "ai": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", | ||
2989 | "build": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", | ||
2990 | "core": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", | ||
2991 | "dataset": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", | ||
2992 | "expandedLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", | ||
2993 | "extension": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", | ||
2994 | "lite": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", | ||
2995 | "security": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", | ||
2996 | "simpleLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", | ||
2997 | "software": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", | ||
2998 | } | ||
2999 | # the element follows the AI profile specification | ||
3000 | ai = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai" | ||
3001 | # the element follows the Build profile specification | ||
3002 | build = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build" | ||
3003 | # the element follows the Core profile specification | ||
3004 | core = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core" | ||
3005 | # the element follows the Dataset profile specification | ||
3006 | dataset = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset" | ||
3007 | # the element follows the ExpandedLicensing profile specification | ||
3008 | expandedLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing" | ||
3009 | # the element follows the Extension profile specification | ||
3010 | extension = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension" | ||
3011 | # the element follows the Lite profile specification | ||
3012 | lite = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite" | ||
3013 | # the element follows the Security profile specification | ||
3014 | security = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security" | ||
3015 | # the element follows the SimpleLicensing profile specification | ||
3016 | simpleLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing" | ||
3017 | # the element follows the Software profile specification | ||
3018 | software = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software" | ||
3019 | |||
3020 | |||
3021 | # Describes a relationship between one or more elements. | ||
3022 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Relationship", compact_type="Relationship", abstract=False) | ||
3023 | class Relationship(Element): | ||
3024 | NODE_KIND = NodeKind.IRI | ||
3025 | ID_ALIAS = "spdxId" | ||
3026 | NAMED_INDIVIDUALS = { | ||
3027 | } | ||
3028 | |||
3029 | @classmethod | ||
3030 | def _register_props(cls): | ||
3031 | super()._register_props() | ||
3032 | # Provides information about the completeness of relationships. | ||
3033 | cls._add_property( | ||
3034 | "completeness", | ||
3035 | EnumProp([ | ||
3036 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", "complete"), | ||
3037 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", "incomplete"), | ||
3038 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"), | ||
3039 | ]), | ||
3040 | iri="https://spdx.org/rdf/3.0.1/terms/Core/completeness", | ||
3041 | compact="completeness", | ||
3042 | ) | ||
3043 | # Specifies the time from which an element is no longer applicable / valid. | ||
3044 | cls._add_property( | ||
3045 | "endTime", | ||
3046 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3047 | iri="https://spdx.org/rdf/3.0.1/terms/Core/endTime", | ||
3048 | compact="endTime", | ||
3049 | ) | ||
3050 | # References the Element on the left-hand side of a relationship. | ||
3051 | cls._add_property( | ||
3052 | "from_", | ||
3053 | ObjectProp(Element, True, context=[ | ||
3054 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
3055 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
3056 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
3057 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
3058 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
3059 | ],), | ||
3060 | iri="https://spdx.org/rdf/3.0.1/terms/Core/from", | ||
3061 | min_count=1, | ||
3062 | compact="from", | ||
3063 | ) | ||
3064 | # Information about the relationship between two Elements. | ||
3065 | cls._add_property( | ||
3066 | "relationshipType", | ||
3067 | EnumProp([ | ||
3068 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", "affects"), | ||
3069 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", "amendedBy"), | ||
3070 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", "ancestorOf"), | ||
3071 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", "availableFrom"), | ||
3072 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", "configures"), | ||
3073 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", "contains"), | ||
3074 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"), | ||
3075 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", "copiedTo"), | ||
3076 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", "delegatedTo"), | ||
3077 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", "dependsOn"), | ||
3078 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", "descendantOf"), | ||
3079 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", "describes"), | ||
3080 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"), | ||
3081 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", "expandsTo"), | ||
3082 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"), | ||
3083 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", "fixedBy"), | ||
3084 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", "fixedIn"), | ||
3085 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", "foundBy"), | ||
3086 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", "generates"), | ||
3087 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"), | ||
3088 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"), | ||
3089 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"), | ||
3090 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"), | ||
3091 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", "hasDataFile"), | ||
3092 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"), | ||
3093 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"), | ||
3094 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"), | ||
3095 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"), | ||
3096 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"), | ||
3097 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"), | ||
3098 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", "hasEvidence"), | ||
3099 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", "hasExample"), | ||
3100 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", "hasHost"), | ||
3101 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", "hasInput"), | ||
3102 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", "hasMetadata"), | ||
3103 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"), | ||
3104 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"), | ||
3105 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", "hasOutput"), | ||
3106 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", "hasPrerequisite"), | ||
3107 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"), | ||
3108 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", "hasRequirement"), | ||
3109 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", "hasSpecification"), | ||
3110 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"), | ||
3111 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", "hasTest"), | ||
3112 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", "hasTestCase"), | ||
3113 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", "hasVariant"), | ||
3114 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", "invokedBy"), | ||
3115 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", "modifiedBy"), | ||
3116 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", "other"), | ||
3117 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", "packagedBy"), | ||
3118 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", "patchedBy"), | ||
3119 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", "publishedBy"), | ||
3120 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", "reportedBy"), | ||
3121 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", "republishedBy"), | ||
3122 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"), | ||
3123 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", "testedOn"), | ||
3124 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", "trainedOn"), | ||
3125 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"), | ||
3126 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", "usesTool"), | ||
3127 | ]), | ||
3128 | iri="https://spdx.org/rdf/3.0.1/terms/Core/relationshipType", | ||
3129 | min_count=1, | ||
3130 | compact="relationshipType", | ||
3131 | ) | ||
3132 | # Specifies the time from which an element is applicable / valid. | ||
3133 | cls._add_property( | ||
3134 | "startTime", | ||
3135 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3136 | iri="https://spdx.org/rdf/3.0.1/terms/Core/startTime", | ||
3137 | compact="startTime", | ||
3138 | ) | ||
3139 | # References an Element on the right-hand side of a relationship. | ||
3140 | cls._add_property( | ||
3141 | "to", | ||
3142 | ListProp(ObjectProp(Element, False, context=[ | ||
3143 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
3144 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
3145 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
3146 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
3147 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
3148 | ],)), | ||
3149 | iri="https://spdx.org/rdf/3.0.1/terms/Core/to", | ||
3150 | min_count=1, | ||
3151 | compact="to", | ||
3152 | ) | ||
3153 | |||
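A sketch tying the pieces together: a dependsOn relationship from one element to another. Note the trailing underscore on from_ (avoiding the Python keyword; it serializes compactly as "from") and that "to" is a required list; pkg_a and pkg_b are hypothetical Element instances created elsewhere:

    rel = Relationship()
    rel.spdxId = "https://example.com/spdxdocs/rel-1"   # ID_ALIAS maps the node IRI to "spdxId"
    rel.from_ = pkg_a
    rel.relationshipType = RelationshipType.dependsOn   # IRI constant from the class below
    rel.to.append(pkg_b)                                # min_count=1: at least one target
    rel.completeness = RelationshipCompleteness.complete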
3154 | |||
3155 | # Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness. | ||
3156 | @register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False) | ||
3157 | class RelationshipCompleteness(SHACLObject): | ||
3158 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3159 | NAMED_INDIVIDUALS = { | ||
3160 | "complete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", | ||
3161 | "incomplete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", | ||
3162 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", | ||
3163 | } | ||
3164 | # The relationship is known to be exhaustive. | ||
3165 | complete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete" | ||
3166 | # The relationship is known not to be exhaustive. | ||
3167 | incomplete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete" | ||
3168 | # No assertion can be made about the completeness of the relationship. | ||
3169 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion" | ||
3170 | |||
3171 | |||
3172 | # Information about the relationship between two Elements. | ||
3173 | @register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False) | ||
3174 | class RelationshipType(SHACLObject): | ||
3175 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3176 | NAMED_INDIVIDUALS = { | ||
3177 | "affects": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", | ||
3178 | "amendedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", | ||
3179 | "ancestorOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", | ||
3180 | "availableFrom": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", | ||
3181 | "configures": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", | ||
3182 | "contains": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", | ||
3183 | "coordinatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", | ||
3184 | "copiedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", | ||
3185 | "delegatedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", | ||
3186 | "dependsOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", | ||
3187 | "descendantOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", | ||
3188 | "describes": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", | ||
3189 | "doesNotAffect": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", | ||
3190 | "expandsTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", | ||
3191 | "exploitCreatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", | ||
3192 | "fixedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", | ||
3193 | "fixedIn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", | ||
3194 | "foundBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", | ||
3195 | "generates": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", | ||
3196 | "hasAddedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", | ||
3197 | "hasAssessmentFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", | ||
3198 | "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", | ||
3199 | "hasConcludedLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", | ||
3200 | "hasDataFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", | ||
3201 | "hasDeclaredLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", | ||
3202 | "hasDeletedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", | ||
3203 | "hasDependencyManifest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", | ||
3204 | "hasDistributionArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", | ||
3205 | "hasDocumentation": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", | ||
3206 | "hasDynamicLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", | ||
3207 | "hasEvidence": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", | ||
3208 | "hasExample": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", | ||
3209 | "hasHost": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", | ||
3210 | "hasInput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", | ||
3211 | "hasMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", | ||
3212 | "hasOptionalComponent": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", | ||
3213 | "hasOptionalDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", | ||
3214 | "hasOutput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", | ||
3215 | "hasPrerequisite": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", | ||
3216 | "hasProvidedDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", | ||
3217 | "hasRequirement": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", | ||
3218 | "hasSpecification": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", | ||
3219 | "hasStaticLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", | ||
3220 | "hasTest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", | ||
3221 | "hasTestCase": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", | ||
3222 | "hasVariant": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", | ||
3223 | "invokedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", | ||
3224 | "modifiedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", | ||
3225 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", | ||
3226 | "packagedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", | ||
3227 | "patchedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", | ||
3228 | "publishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", | ||
3229 | "reportedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", | ||
3230 | "republishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", | ||
3231 | "serializedInArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", | ||
3232 | "testedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", | ||
3233 | "trainedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", | ||
3234 | "underInvestigationFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", | ||
3235 | "usesTool": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", | ||
3236 | } | ||
3237 | # The `from` Vulnerability affects each `to` Element. The use of the `affects` type is constrained to `VexAffectedVulnAssessmentRelationship` classed relationships. | ||
3238 | affects = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects" | ||
3239 | # The `from` Element is amended by each `to` Element. | ||
3240 | amendedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy" | ||
3241 | # The `from` Element is an ancestor of each `to` Element. | ||
3242 | ancestorOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf" | ||
3243 | # The `from` Element is available from the additional supplier described by each `to` Element. | ||
3244 | availableFrom = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom" | ||
3245 | # The `from` Element is a configuration applied to each `to` Element, during a LifecycleScopeType period. | ||
3246 | configures = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures" | ||
3247 | # The `from` Element contains each `to` Element. | ||
3248 | contains = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains" | ||
3249 | # The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent). | ||
3250 | coordinatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy" | ||
3251 | # The `from` Element has been copied to each `to` Element. | ||
3252 | copiedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo" | ||
3253 | # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy), during a LifecycleScopeType (e.g. the `to` invokedBy Relationship is being done on behalf of `from`). | ||
3254 | delegatedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo" | ||
3255 | # The `from` Element depends on each `to` Element, during a LifecycleScopeType period. | ||
3256 | dependsOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn" | ||
3257 | # The `from` Element is a descendant of each `to` Element. | ||
3258 | descendantOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf" | ||
3259 | # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used. | ||
3260 | describes = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes" | ||
3261 | # The `from` Vulnerability has no impact on each `to` Element. The use of the `doesNotAffect` type is constrained to `VexNotAffectedVulnAssessmentRelationship` classed relationships. | ||
3262 | doesNotAffect = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect" | ||
3263 | # The `from` archive expands out as an artifact described by each `to` Element. | ||
3264 | expandsTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo" | ||
3265 | # The `from` Vulnerability has had an exploit created against it by each `to` Agent. | ||
3266 | exploitCreatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy" | ||
3267 | # Designates a `from` Vulnerability has been fixed by the `to` Agent(s). | ||
3268 | fixedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy" | ||
3269 | # A `from` Vulnerability has been fixed in each `to` Element. The use of the `fixedIn` type is constrained to `VexFixedVulnAssessmentRelationship` classed relationships. | ||
3270 | fixedIn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn" | ||
3271 | # Designates a `from` Vulnerability was originally discovered by the `to` Agent(s). | ||
3272 | foundBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy" | ||
3273 | # The `from` Element generates each `to` Element. | ||
3274 | generates = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates" | ||
3275 | # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`). | ||
3276 | hasAddedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile" | ||
3277 | # Relates a `from` Vulnerability and each `to` Element with a security assessment. To be used with `VulnAssessmentRelationship` types. | ||
3278 | hasAssessmentFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor" | ||
3279 | # Used to associate a `from` Artifact with each `to` Vulnerability. | ||
3280 | hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability" | ||
3281 | # The `from` SoftwareArtifact is concluded by the SPDX data creator to be governed by each `to` license. | ||
3282 | hasConcludedLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense" | ||
3283 | # The `from` Element treats each `to` Element as a data file. A data file is an artifact that stores data required or optional for the `from` Element's functionality. A data file can be a database file, an index file, a log file, an AI model file, a calibration data file, a temporary file, a backup file, and more. For AI training dataset, test dataset, test artifact, configuration data, build input data, and build output data, please consider using the more specific relationship types: `trainedOn`, `testedOn`, `hasTest`, `configures`, `hasInput`, and `hasOutput`, respectively. This relationship does not imply dependency. | ||
3284 | hasDataFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile" | ||
3285 | # The `from` SoftwareArtifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling. | ||
3286 | hasDeclaredLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense" | ||
3287 | # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`). | ||
3288 | hasDeletedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile" | ||
3289 | # The `from` Element has manifest files that contain dependency information in each `to` Element. | ||
3290 | hasDependencyManifest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest" | ||
3291 | # The `from` Element is distributed as an artifact in each `to` Element (e.g. an RPM or archive file). | ||
3292 | hasDistributionArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact" | ||
3293 | # The `from` Element is documented by each `to` Element. | ||
3294 | hasDocumentation = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation" | ||
3295 | # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period. | ||
3296 | hasDynamicLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink" | ||
3297 | # Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`). | ||
3298 | hasEvidence = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence" | ||
3299 | # Every `to` Element is an example for the `from` Element (`from` hasExample `to`). | ||
3300 | hasExample = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample" | ||
3301 | # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. the host that the build runs on). | ||
3302 | hasHost = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost" | ||
3303 | # The `from` Build has each `to` Element as an input, during a LifecycleScopeType period. | ||
3304 | hasInput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput" | ||
3305 | # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`). | ||
3306 | hasMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata" | ||
3307 | # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`). | ||
3308 | hasOptionalComponent = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent" | ||
3309 | # The `from` Element optionally depends on each `to` Element, during a LifecycleScopeType period. | ||
3310 | hasOptionalDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency" | ||
3311 | # The `from` Build element generates each `to` Element as an output, during a LifecycleScopeType period. | ||
3312 | hasOutput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput" | ||
3313 | # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period. | ||
3314 | hasPrerequisite = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite" | ||
3315 | # The `from` Element has a dependency on each `to` Element; the dependency is not in the distributed artifact, but is assumed to be provided, during a LifecycleScopeType period. | ||
3316 | hasProvidedDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency" | ||
3317 | # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period. | ||
3318 | hasRequirement = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement" | ||
3319 | # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period. | ||
3320 | hasSpecification = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification" | ||
3321 | # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period. | ||
3322 | hasStaticLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink" | ||
3323 | # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period. | ||
3324 | hasTest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest" | ||
3325 | # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`). | ||
3326 | hasTestCase = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase" | ||
3327 | # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`). | ||
3328 | hasVariant = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant" | ||
3329 | # The `from` Element was invoked by the `to` Agent, during a LifecycleScopeType period (for example, a Build element that describes a build step). | ||
3330 | invokedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy" | ||
3331 | # The `from` Element is modified by each `to` Element. | ||
3332 | modifiedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy" | ||
3333 | # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless). | ||
3334 | other = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other" | ||
3335 | # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`). | ||
3336 | packagedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy" | ||
3337 | # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`). | ||
3338 | patchedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy" | ||
3339 | # Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent. | ||
3340 | publishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy" | ||
3341 | # Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent. | ||
3342 | reportedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy" | ||
3343 | # Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (i.e. NVD) by each `to` Agent. | ||
3344 | republishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy" | ||
3345 | # The `from` SpdxDocument can be found in a serialized form in each `to` Artifact. | ||
3346 | serializedInArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact" | ||
3347 | # The `from` Element has been tested on the `to` Element(s). | ||
3348 | testedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn" | ||
3349 | # The `from` Element has been trained on the `to` Element(s). | ||
3350 | trainedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn" | ||
3351 | # The `from` Vulnerability impact is being investigated for each `to` Element. The use of the `underInvestigationFor` type is constrained to `VexUnderInvestigationVulnAssessmentRelationship` classed relationships. | ||
3352 | underInvestigationFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor" | ||
3353 | # The `from` Element uses each `to` Element as a tool, during a LifecycleScopeType period. | ||
3354 | usesTool = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool" | ||
3355 | |||
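# Illustrative sketch: mapping an IRI back to its short name is a reverse
# lookup over NAMED_INDIVIDUALS (the helper name here is hypothetical):
#
#   def relationship_type_name(iri):
#       for name, value in RelationshipType.NAMED_INDIVIDUALS.items():
#           if value == iri:
#               return name
#       raise KeyError(iri)
#
#   relationship_type_name(RelationshipType.usesTool)  # -> "usesTool"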
3356 | |||
3357 | # A collection of SPDX Elements that could potentially be serialized. | ||
3358 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False) | ||
3359 | class SpdxDocument(ElementCollection): | ||
3360 | NODE_KIND = NodeKind.IRI | ||
3361 | ID_ALIAS = "spdxId" | ||
3362 | NAMED_INDIVIDUALS = { | ||
3363 | } | ||
3364 | |||
3365 | @classmethod | ||
3366 | def _register_props(cls): | ||
3367 | super()._register_props() | ||
3368 | # Provides the license under which the SPDX documentation of the Element can be | ||
3369 | # used. | ||
3370 | cls._add_property( | ||
3371 | "dataLicense", | ||
3372 | ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
3373 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
3374 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
3375 | ],), | ||
3376 | iri="https://spdx.org/rdf/3.0.1/terms/Core/dataLicense", | ||
3377 | compact="dataLicense", | ||
3378 | ) | ||
3379 | # Provides an ExternalMap of Element identifiers. | ||
3380 | cls._add_property( | ||
3381 | "import_", | ||
3382 | ListProp(ObjectProp(ExternalMap, False)), | ||
3383 | iri="https://spdx.org/rdf/3.0.1/terms/Core/import", | ||
3384 | compact="import", | ||
3385 | ) | ||
3386 | # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance. | ||
3387 | cls._add_property( | ||
3388 | "namespaceMap", | ||
3389 | ListProp(ObjectProp(NamespaceMap, False)), | ||
3390 | iri="https://spdx.org/rdf/3.0.1/terms/Core/namespaceMap", | ||
3391 | compact="namespaceMap", | ||
3392 | ) | ||
3393 | |||
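# Illustrative sketch: "import" is a Python keyword, so the property is named
# "import_" on the object while still serializing under the compact name
# "import". NamespaceMap and ExternalMap instances (defined earlier in this
# file) are attached as plain lists; the values shown are hypothetical.
#
#   doc = SpdxDocument()
#   doc.import_ = [ExternalMap()]        # identifiers resolved from elsewhere
#   doc.namespaceMap = [NamespaceMap()]  # prefix -> namespace URI mappings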
3394 | |||
3395 | # Indicates the type of support that is associated with an artifact. | ||
3396 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SupportType", compact_type="SupportType", abstract=False) | ||
3397 | class SupportType(SHACLObject): | ||
3398 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3399 | NAMED_INDIVIDUALS = { | ||
3400 | "deployed": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", | ||
3401 | "development": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", | ||
3402 | "endOfSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", | ||
3403 | "limitedSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", | ||
3404 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", | ||
3405 | "noSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", | ||
3406 | "support": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", | ||
3407 | } | ||
3408 | # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service. | ||
3409 | deployed = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed" | ||
3410 | # the artifact is in active development and is not considered ready for formal support from the supplier. | ||
3411 | development = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development" | ||
3412 | # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact. | ||
3413 | endOfSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport" | ||
3414 | # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support. | ||
3415 | limitedSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport" | ||
3416 | # no assertion about the type of support is made. This is considered the default if no other support type is used. | ||
3417 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion" | ||
3418 | # there is no support for the artifact from the supplier, consumer assumes any support obligations. | ||
3419 | noSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport" | ||
3420 | # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support. | ||
3421 | support = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support" | ||
3422 | |||
3423 | |||
3424 | # An element of hardware and/or software utilized to carry out a particular function. | ||
3425 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Tool", compact_type="Tool", abstract=False) | ||
3426 | class Tool(Element): | ||
3427 | NODE_KIND = NodeKind.IRI | ||
3428 | ID_ALIAS = "spdxId" | ||
3429 | NAMED_INDIVIDUALS = { | ||
3430 | } | ||
3431 | |||
3432 | |||
3433 | # Categories of confidentiality level. | ||
3434 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False) | ||
3435 | class dataset_ConfidentialityLevelType(SHACLObject): | ||
3436 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3437 | NAMED_INDIVIDUALS = { | ||
3438 | "amber": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", | ||
3439 | "clear": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", | ||
3440 | "green": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", | ||
3441 | "red": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", | ||
3442 | } | ||
3443 | # Data points in the dataset can be shared only with specific organizations and their clients on a need to know basis. | ||
3444 | amber = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber" | ||
3445 | # Dataset may be distributed freely, without restriction. | ||
3446 | clear = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear" | ||
3447 | # Dataset can be shared within a community of peers and partners. | ||
3448 | green = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green" | ||
3449 | # Data points in the dataset are highly confidential and can only be shared with named recipients. | ||
3450 | red = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red" | ||
3451 | |||
3452 | |||
3453 | # Availability of dataset. | ||
3454 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False) | ||
3455 | class dataset_DatasetAvailabilityType(SHACLObject): | ||
3456 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3457 | NAMED_INDIVIDUALS = { | ||
3458 | "clickthrough": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", | ||
3459 | "directDownload": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", | ||
3460 | "query": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", | ||
3461 | "registration": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", | ||
3462 | "scrapingScript": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", | ||
3463 | } | ||
3464 | # the dataset is not publicly available and can only be accessed after affirmatively accepting terms on a clickthrough webpage. | ||
3465 | clickthrough = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough" | ||
3466 | # the dataset is publicly available and can be downloaded directly. | ||
3467 | directDownload = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload" | ||
3468 | # the dataset is publicly available, but not all at once, and can only be accessed through queries which return parts of the dataset. | ||
3469 | query = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query" | ||
3470 | # the dataset is not publicly available and an email registration is required before accessing the dataset, although without an affirmative acceptance of terms. | ||
3471 | registration = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration" | ||
3472 | # the dataset provider is not making available the underlying data and the dataset must be reassembled, typically using the provided script for scraping the data. | ||
3473 | scrapingScript = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript" | ||
3474 | |||
3475 | |||
3476 | # Enumeration of dataset types. | ||
3477 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False) | ||
3478 | class dataset_DatasetType(SHACLObject): | ||
3479 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3480 | NAMED_INDIVIDUALS = { | ||
3481 | "audio": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", | ||
3482 | "categorical": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", | ||
3483 | "graph": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", | ||
3484 | "image": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", | ||
3485 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", | ||
3486 | "numeric": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", | ||
3487 | "other": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", | ||
3488 | "sensor": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", | ||
3489 | "structured": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", | ||
3490 | "syntactic": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", | ||
3491 | "text": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", | ||
3492 | "timeseries": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", | ||
3493 | "timestamp": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", | ||
3494 | "video": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", | ||
3495 | } | ||
3496 | # data is audio based, such as a collection of music from the 80s. | ||
3497 | audio = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio" | ||
3498 | # data that is classified into a discrete number of categories, such as the eye color of a population of people. | ||
3499 | categorical = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical" | ||
3500 | # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends. | ||
3501 | graph = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph" | ||
3502 | # data is a collection of images such as pictures of animals. | ||
3503 | image = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image" | ||
3504 | # data type is not known. | ||
3505 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion" | ||
3506 | # data consists only of numeric entries. | ||
3507 | numeric = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric" | ||
3508 | # data is of a type not included in this list. | ||
3509 | other = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other" | ||
3510 | # data is recorded from a physical sensor, such as a thermometer reading or biometric device. | ||
3511 | sensor = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor" | ||
3512 | # data is stored in tabular format or retrieved from a relational database. | ||
3513 | structured = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured" | ||
3514 | # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing. | ||
3515 | syntactic = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic" | ||
3516 | # data consists of unstructured text, such as a book, Wikipedia article (without images), or transcript. | ||
3517 | text = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text" | ||
3518 | # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day. | ||
3519 | timeseries = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries" | ||
3520 | # data is recorded with a timestamp for each entry, but not necessarily ordered or at specific intervals, such as when a taxi ride starts and ends. | ||
3521 | timestamp = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp" | ||
3522 | # data is video based, such as a collection of movie clips featuring Tom Hanks. | ||
3523 | video = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video" | ||
3524 | |||
3525 | |||
3526 | # Abstract class for additional text intended to be added to a License, but | ||
3527 | # which is not itself a standalone License. | ||
3528 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True) | ||
3529 | class expandedlicensing_LicenseAddition(Element): | ||
3530 | NODE_KIND = NodeKind.IRI | ||
3531 | ID_ALIAS = "spdxId" | ||
3532 | NAMED_INDIVIDUALS = { | ||
3533 | } | ||
3534 | |||
3535 | @classmethod | ||
3536 | def _register_props(cls): | ||
3537 | super()._register_props() | ||
3538 | # Identifies the full text of a LicenseAddition. | ||
3539 | cls._add_property( | ||
3540 | "expandedlicensing_additionText", | ||
3541 | StringProp(), | ||
3542 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/additionText", | ||
3543 | min_count=1, | ||
3544 | compact="expandedlicensing_additionText", | ||
3545 | ) | ||
3546 | # Specifies whether an additional text identifier has been marked as deprecated. | ||
3547 | cls._add_property( | ||
3548 | "expandedlicensing_isDeprecatedAdditionId", | ||
3549 | BooleanProp(), | ||
3550 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedAdditionId", | ||
3551 | compact="expandedlicensing_isDeprecatedAdditionId", | ||
3552 | ) | ||
3553 | # Identifies all the text and metadata associated with a license in the license | ||
3554 | # XML format. | ||
3555 | cls._add_property( | ||
3556 | "expandedlicensing_licenseXml", | ||
3557 | StringProp(), | ||
3558 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml", | ||
3559 | compact="expandedlicensing_licenseXml", | ||
3560 | ) | ||
3561 | # Specifies the licenseId that is preferred to be used in place of a deprecated | ||
3562 | # License or LicenseAddition. | ||
3563 | cls._add_property( | ||
3564 | "expandedlicensing_obsoletedBy", | ||
3565 | StringProp(), | ||
3566 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy", | ||
3567 | compact="expandedlicensing_obsoletedBy", | ||
3568 | ) | ||
3569 | # Contains a URL where the License or LicenseAddition can be found in use. | ||
3570 | cls._add_property( | ||
3571 | "expandedlicensing_seeAlso", | ||
3572 | ListProp(AnyURIProp()), | ||
3573 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso", | ||
3574 | compact="expandedlicensing_seeAlso", | ||
3575 | ) | ||
3576 | # Identifies the full text of a LicenseAddition, in SPDX templating format. | ||
3577 | cls._add_property( | ||
3578 | "expandedlicensing_standardAdditionTemplate", | ||
3579 | StringProp(), | ||
3580 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardAdditionTemplate", | ||
3581 | compact="expandedlicensing_standardAdditionTemplate", | ||
3582 | ) | ||
3583 | |||
3584 | |||
3585 | # A license exception that is listed on the SPDX Exceptions list. | ||
3586 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False) | ||
3587 | class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition): | ||
3588 | NODE_KIND = NodeKind.IRI | ||
3589 | ID_ALIAS = "spdxId" | ||
3590 | NAMED_INDIVIDUALS = { | ||
3591 | } | ||
3592 | |||
3593 | @classmethod | ||
3594 | def _register_props(cls): | ||
3595 | super()._register_props() | ||
3596 | # Specifies the SPDX License List version in which this license or exception | ||
3597 | # identifier was deprecated. | ||
3598 | cls._add_property( | ||
3599 | "expandedlicensing_deprecatedVersion", | ||
3600 | StringProp(), | ||
3601 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion", | ||
3602 | compact="expandedlicensing_deprecatedVersion", | ||
3603 | ) | ||
3604 | # Specifies the SPDX License List version in which this ListedLicense or | ||
3605 | # ListedLicenseException identifier was first added. | ||
3606 | cls._add_property( | ||
3607 | "expandedlicensing_listVersionAdded", | ||
3608 | StringProp(), | ||
3609 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded", | ||
3610 | compact="expandedlicensing_listVersionAdded", | ||
3611 | ) | ||
3612 | |||
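# Illustrative sketch: a ListedLicenseException inherits the LicenseAddition
# properties above, so the required addition text and the list-version
# metadata are all set by attribute assignment (values are hypothetical):
#
#   exc = expandedlicensing_ListedLicenseException()
#   exc.expandedlicensing_additionText = "Exception text..."  # min_count=1
#   exc.expandedlicensing_listVersionAdded = "3.10"
#   exc.expandedlicensing_seeAlso = ["https://spdx.org/licenses/"]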
3613 | |||
3614 | # A property name with an associated value. | ||
3615 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False) | ||
3616 | class extension_CdxPropertyEntry(SHACLObject): | ||
3617 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3618 | NAMED_INDIVIDUALS = { | ||
3619 | } | ||
3620 | |||
3621 | @classmethod | ||
3622 | def _register_props(cls): | ||
3623 | super()._register_props() | ||
3624 | # A name used in a CdxPropertyEntry name-value pair. | ||
3625 | cls._add_property( | ||
3626 | "extension_cdxPropName", | ||
3627 | StringProp(), | ||
3628 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropName", | ||
3629 | min_count=1, | ||
3630 | compact="extension_cdxPropName", | ||
3631 | ) | ||
3632 | # A value used in a CdxPropertyEntry name-value pair. | ||
3633 | cls._add_property( | ||
3634 | "extension_cdxPropValue", | ||
3635 | StringProp(), | ||
3636 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropValue", | ||
3637 | compact="extension_cdxPropValue", | ||
3638 | ) | ||
3639 | |||
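# Illustrative sketch: a CycloneDX property carried through an SPDX extension
# is just a name/value pair (the name shown is hypothetical):
#
#   entry = extension_CdxPropertyEntry()
#   entry.extension_cdxPropName = "cdx:reproducible"  # min_count=1
#   entry.extension_cdxPropValue = "true"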
3640 | |||
3641 | # A characterization of some aspect of an Element that is associated with the Element in a generalized fashion. | ||
3642 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/Extension", compact_type="extension_Extension", abstract=True) | ||
3643 | class extension_Extension(SHACLExtensibleObject, SHACLObject): | ||
3644 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3645 | NAMED_INDIVIDUALS = { | ||
3646 | } | ||
3647 | |||
3648 | |||
3649 | # Specifies the CVSS base, temporal, threat, or environmental severity type. | ||
3650 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False) | ||
3651 | class security_CvssSeverityType(SHACLObject): | ||
3652 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3653 | NAMED_INDIVIDUALS = { | ||
3654 | "critical": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", | ||
3655 | "high": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", | ||
3656 | "low": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", | ||
3657 | "medium": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", | ||
3658 | "none": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", | ||
3659 | } | ||
3660 | # When a CVSS score is between 9.0 and 10.0 | ||
3661 | critical = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical" | ||
3662 | # When a CVSS score is between 7.0 and 8.9 | ||
3663 | high = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high" | ||
3664 | # When a CVSS score is between 0.1 and 3.9 | ||
3665 | low = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low" | ||
3666 | # When a CVSS score is between 4.0 and 6.9 | ||
3667 | medium = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium" | ||
3668 | # When a CVSS score is 0.0 | ||
3669 | none = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none" | ||
3670 | |||
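# Illustrative sketch: the score bands documented above map a numeric CVSS
# score onto these severity constants (helper name and boundary handling are
# assumptions, not part of the generated bindings):
#
#   def cvss_severity(score):
#       if score == 0.0:
#           return security_CvssSeverityType.none
#       if score <= 3.9:
#           return security_CvssSeverityType.low
#       if score <= 6.9:
#           return security_CvssSeverityType.medium
#       if score <= 8.9:
#           return security_CvssSeverityType.high
#       return security_CvssSeverityType.critical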
3671 | |||
3672 | # Specifies the exploit catalog type. | ||
3673 | @register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False) | ||
3674 | class security_ExploitCatalogType(SHACLObject): | ||
3675 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3676 | NAMED_INDIVIDUALS = { | ||
3677 | "kev": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", | ||
3678 | "other": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", | ||
3679 | } | ||
3680 | # CISA's Known Exploited Vulnerability (KEV) Catalog | ||
3681 | kev = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev" | ||
3682 | # Other exploit catalogs | ||
3683 | other = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other" | ||
3684 | |||
3685 | |||
3686 | # Specifies the SSVC decision type. | ||
3687 | @register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False) | ||
3688 | class security_SsvcDecisionType(SHACLObject): | ||
3689 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3690 | NAMED_INDIVIDUALS = { | ||
3691 | "act": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", | ||
3692 | "attend": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", | ||
3693 | "track": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", | ||
3694 | "trackStar": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", | ||
3695 | } | ||
3696 | # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification either internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed upon actions. CISA recommends remediating Act vulnerabilities as soon as possible. | ||
3697 | act = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act" | ||
3698 | # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification either internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines. | ||
3699 | attend = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend" | ||
3700 | # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines. | ||
3701 | track = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track" | ||
3702 | # ("Track\*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track\* vulnerabilities within standard update timelines. | ||
3703 | trackStar = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar" | ||
3704 | |||
3705 | |||
3706 | # Specifies the VEX justification type. | ||
3707 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False) | ||
3708 | class security_VexJustificationType(SHACLObject): | ||
3709 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3710 | NAMED_INDIVIDUALS = { | ||
3711 | "componentNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", | ||
3712 | "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", | ||
3713 | "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", | ||
3714 | "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", | ||
3715 | "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", | ||
3716 | } | ||
3717 | # The software is not affected because the vulnerable component is not in the product. | ||
3718 | componentNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent" | ||
3719 | # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability. | ||
3720 | inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist" | ||
3721 | # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack. | ||
3722 | vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary" | ||
3723 | # The affected code is not reachable through the execution of the code, including non-anticipated states of the product. | ||
3724 | vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath" | ||
3725 | # The product is not affected because the code underlying the vulnerability is not present in the product. | ||
3726 | vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent" | ||
3727 | |||
3728 | |||
3729 | # Abstract ancestor class for all vulnerability assessments. | ||
3730 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True) | ||
3731 | class security_VulnAssessmentRelationship(Relationship): | ||
3732 | NODE_KIND = NodeKind.IRI | ||
3733 | ID_ALIAS = "spdxId" | ||
3734 | NAMED_INDIVIDUALS = { | ||
3735 | } | ||
3736 | |||
3737 | @classmethod | ||
3738 | def _register_props(cls): | ||
3739 | super()._register_props() | ||
3740 | # Identifies who or what supplied the artifact or VulnAssessmentRelationship | ||
3741 | # referenced by the Element. | ||
3742 | cls._add_property( | ||
3743 | "suppliedBy", | ||
3744 | ObjectProp(Agent, False, context=[ | ||
3745 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
3746 | ],), | ||
3747 | iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy", | ||
3748 | compact="suppliedBy", | ||
3749 | ) | ||
3750 | # Specifies an Element contained in a piece of software where a vulnerability was | ||
3751 | # found. | ||
3752 | cls._add_property( | ||
3753 | "security_assessedElement", | ||
3754 | ObjectProp(software_SoftwareArtifact, False), | ||
3755 | iri="https://spdx.org/rdf/3.0.1/terms/Security/assessedElement", | ||
3756 | compact="security_assessedElement", | ||
3757 | ) | ||
3758 | # Specifies a time when a vulnerability assessment was modified. | ||
3759 | cls._add_property( | ||
3760 | "security_modifiedTime", | ||
3761 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3762 | iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime", | ||
3763 | compact="security_modifiedTime", | ||
3764 | ) | ||
3765 | # Specifies the time when a vulnerability was published. | ||
3766 | cls._add_property( | ||
3767 | "security_publishedTime", | ||
3768 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3769 | iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime", | ||
3770 | compact="security_publishedTime", | ||
3771 | ) | ||
3772 | # Specifies the time and date when a vulnerability was withdrawn. | ||
3773 | cls._add_property( | ||
3774 | "security_withdrawnTime", | ||
3775 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3776 | iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime", | ||
3777 | compact="security_withdrawnTime", | ||
3778 | ) | ||
3779 | |||
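# Illustrative sketch: every DateTimeStampProp above enforces the pattern
# ^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$, i.e. UTC to second precision with a
# literal "Z" suffix. A conforming value can be produced with the standard
# library:
#
#   from datetime import datetime, timezone
#   datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")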
3780 | |||
3781 | # Abstract class representing a license combination consisting of one or more licenses. | ||
3782 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True) | ||
3783 | class simplelicensing_AnyLicenseInfo(Element): | ||
3784 | NODE_KIND = NodeKind.IRI | ||
3785 | ID_ALIAS = "spdxId" | ||
3786 | NAMED_INDIVIDUALS = { | ||
3787 | } | ||
3788 | |||
3789 | |||
3790 | # An SPDX Element containing an SPDX license expression string. | ||
3791 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False) | ||
3792 | class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo): | ||
3793 | NODE_KIND = NodeKind.IRI | ||
3794 | ID_ALIAS = "spdxId" | ||
3795 | NAMED_INDIVIDUALS = { | ||
3796 | } | ||
3797 | |||
3798 | @classmethod | ||
3799 | def _register_props(cls): | ||
3800 | super()._register_props() | ||
3801 | # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom | ||
3802 | # License Addition to its URI ID. | ||
3803 | cls._add_property( | ||
3804 | "simplelicensing_customIdToUri", | ||
3805 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
3806 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/customIdToUri", | ||
3807 | compact="simplelicensing_customIdToUri", | ||
3808 | ) | ||
3809 | # A string in the license expression format. | ||
3810 | cls._add_property( | ||
3811 | "simplelicensing_licenseExpression", | ||
3812 | StringProp(), | ||
3813 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseExpression", | ||
3814 | min_count=1, | ||
3815 | compact="simplelicensing_licenseExpression", | ||
3816 | ) | ||
3817 | # The version of the SPDX License List used in the license expression. | ||
3818 | cls._add_property( | ||
3819 | "simplelicensing_licenseListVersion", | ||
3820 | StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",), | ||
3821 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseListVersion", | ||
3822 | compact="simplelicensing_licenseListVersion", | ||
3823 | ) | ||
3824 | |||
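# Illustrative sketch: the required license expression is a plain SPDX
# expression string, and licenseListVersion must be a semantic version per
# the pattern above (values shown are hypothetical):
#
#   lic = simplelicensing_LicenseExpression()
#   lic.simplelicensing_licenseExpression = "MIT OR Apache-2.0"
#   lic.simplelicensing_licenseListVersion = "3.21.0"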
3825 | |||
3826 | # A license or addition that is not listed on the SPDX License List. | ||
3827 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False) | ||
3828 | class simplelicensing_SimpleLicensingText(Element): | ||
3829 | NODE_KIND = NodeKind.IRI | ||
3830 | ID_ALIAS = "spdxId" | ||
3831 | NAMED_INDIVIDUALS = { | ||
3832 | } | ||
3833 | |||
3834 | @classmethod | ||
3835 | def _register_props(cls): | ||
3836 | super()._register_props() | ||
3837 | # Identifies the full text of a License or Addition. | ||
3838 | cls._add_property( | ||
3839 | "simplelicensing_licenseText", | ||
3840 | StringProp(), | ||
3841 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText", | ||
3842 | min_count=1, | ||
3843 | compact="simplelicensing_licenseText", | ||
3844 | ) | ||
3845 | |||
3846 | |||
3847 | # A canonical, unique, immutable identifier. | ||
3848 | @register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False) | ||
3849 | class software_ContentIdentifier(IntegrityMethod): | ||
3850 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3851 | NAMED_INDIVIDUALS = { | ||
3852 | } | ||
3853 | |||
3854 | @classmethod | ||
3855 | def _register_props(cls): | ||
3856 | super()._register_props() | ||
3857 | # Specifies the type of the content identifier. | ||
3858 | cls._add_property( | ||
3859 | "software_contentIdentifierType", | ||
3860 | EnumProp([ | ||
3861 | ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", "gitoid"), | ||
3862 | ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", "swhid"), | ||
3863 | ]), | ||
3864 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierType", | ||
3865 | min_count=1, | ||
3866 | compact="software_contentIdentifierType", | ||
3867 | ) | ||
3868 | # Specifies the value of the content identifier. | ||
3869 | cls._add_property( | ||
3870 | "software_contentIdentifierValue", | ||
3871 | AnyURIProp(), | ||
3872 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierValue", | ||
3873 | min_count=1, | ||
3874 | compact="software_contentIdentifierValue", | ||
3875 | ) | ||
3876 | |||
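# Illustrative sketch: both required fields are set by attribute assignment,
# reusing the example SWHID from the enum documentation below:
#
#   cid = software_ContentIdentifier()
#   cid.software_contentIdentifierType = software_ContentIdentifierType.swhid
#   cid.software_contentIdentifierValue = "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"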
3877 | |||
3878 | # Specifies the type of a content identifier. | ||
3879 | @register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False) | ||
3880 | class software_ContentIdentifierType(SHACLObject): | ||
3881 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3882 | NAMED_INDIVIDUALS = { | ||
3883 | "gitoid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", | ||
3884 | "swhid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", | ||
3885 | } | ||
3886 | # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg). | ||
3887 | gitoid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid" | ||
3888 | # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`. | ||
3889 | swhid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid" | ||
3890 | |||
3891 | |||
3892 | # Enumeration of the different kinds of SPDX file. | ||
3893 | @register("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False) | ||
3894 | class software_FileKindType(SHACLObject): | ||
3895 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3896 | NAMED_INDIVIDUALS = { | ||
3897 | "directory": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", | ||
3898 | "file": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", | ||
3899 | } | ||
3900 | # The file represents a directory and all content stored in that directory. | ||
3901 | directory = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory" | ||
3902 | # The file represents a single file (default). | ||
3903 | file = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file" | ||
3904 | |||
3905 | |||
3906 | # Provides a set of values to be used to describe the common types of SBOMs that | ||
3907 | # tools may create. | ||
3908 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SbomType", compact_type="software_SbomType", abstract=False) | ||
3909 | class software_SbomType(SHACLObject): | ||
3910 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3911 | NAMED_INDIVIDUALS = { | ||
3912 | "analyzed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", | ||
3913 | "build": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", | ||
3914 | "deployed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", | ||
3915 | "design": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", | ||
3916 | "runtime": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", | ||
3917 | "source": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", | ||
3918 | } | ||
3919 | # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after its build. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM. | ||
3920 | analyzed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed" | ||
3921 | # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs. | ||
3922 | build = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build" | ||
3923 | # SBOM provides an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options, and examination of execution behavior in a (potentially simulated) deployment environment. | ||
3924 | deployed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed" | ||
3925 | # SBOM of intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact. | ||
3926 | design = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design" | ||
3927 | # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM. | ||
3928 | runtime = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime" | ||
3929 | # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact. | ||
3930 | source = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source" | ||
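
# Editor's sketch: tagging the SBOM category on a software_Sbom. The Sbom
# class and its software_sbomType list property are defined elsewhere in
# this file; both names are assumed from the generation pattern.
from oe import spdx30

sbom = spdx30.software_Sbom(name="core-image-minimal.spdx.json")
sbom.software_sbomType = [spdx30.software_SbomType.build]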
3931 | |||
3932 | |||
3933 | # Provides information about the primary purpose of an Element. | ||
3934 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False) | ||
3935 | class software_SoftwarePurpose(SHACLObject): | ||
3936 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3937 | NAMED_INDIVIDUALS = { | ||
3938 | "application": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", | ||
3939 | "archive": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", | ||
3940 | "bom": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", | ||
3941 | "configuration": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", | ||
3942 | "container": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", | ||
3943 | "data": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", | ||
3944 | "device": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", | ||
3945 | "deviceDriver": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", | ||
3946 | "diskImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", | ||
3947 | "documentation": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", | ||
3948 | "evidence": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", | ||
3949 | "executable": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", | ||
3950 | "file": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", | ||
3951 | "filesystemImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", | ||
3952 | "firmware": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", | ||
3953 | "framework": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", | ||
3954 | "install": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", | ||
3955 | "library": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", | ||
3956 | "manifest": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", | ||
3957 | "model": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", | ||
3958 | "module": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", | ||
3959 | "operatingSystem": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", | ||
3960 | "other": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", | ||
3961 | "patch": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", | ||
3962 | "platform": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", | ||
3963 | "requirement": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", | ||
3964 | "source": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", | ||
3965 | "specification": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", | ||
3966 | "test": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", | ||
3967 | } | ||
3968 | # The Element is a software application. | ||
3969 | application = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application" | ||
3970 | # The Element is an archived collection of one or more files (.tar, .zip, etc.). | ||
3971 | archive = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive" | ||
3972 | # The Element is a bill of materials. | ||
3973 | bom = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom" | ||
3974 | # The Element is configuration data. | ||
3975 | configuration = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration" | ||
3976 | # The Element is a container image which can be used by a container runtime application. | ||
3977 | container = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container" | ||
3978 | # The Element is data. | ||
3979 | data = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data" | ||
3980 | # The Element refers to a chipset, processor, or electronic board. | ||
3981 | device = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device" | ||
3982 | # The Element represents software that controls hardware devices. | ||
3983 | deviceDriver = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver" | ||
3984 | # The Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc. | ||
3985 | diskImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage" | ||
3986 | # The Element is documentation. | ||
3987 | documentation = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation" | ||
3988 | # The Element is the evidence that a specification or requirement has been fulfilled. | ||
3989 | evidence = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence" | ||
3990 | # The Element is an Artifact that can be run on a computer. | ||
3991 | executable = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable" | ||
3992 | # The Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc.). | ||
3993 | file = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file" | ||
3994 | # The Element is a file system image that can be written to a disk (or virtual) partition. | ||
3995 | filesystemImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage" | ||
3996 | # The Element provides low level control over a device's hardware. | ||
3997 | firmware = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware" | ||
3998 | # The Element is a software framework. | ||
3999 | framework = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework" | ||
4000 | # The Element is used to install software on disk. | ||
4001 | install = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install" | ||
4002 | # The Element is a software library. | ||
4003 | library = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library" | ||
4004 | # The Element is a software manifest. | ||
4005 | manifest = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest" | ||
4006 | # The Element is a machine learning or artificial intelligence model. | ||
4007 | model = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model" | ||
4008 | # The Element is a module of a piece of software. | ||
4009 | module = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module" | ||
4010 | # The Element is an operating system. | ||
4011 | operatingSystem = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem" | ||
4012 | # The Element doesn't fit into any of the other categories. | ||
4013 | other = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other" | ||
4014 | # The Element contains a set of changes to update, fix, or improve another Element. | ||
4015 | patch = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch" | ||
4016 | # The Element represents a runtime environment. | ||
4017 | platform = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform" | ||
4018 | # The Element provides a requirement needed as input for another Element. | ||
4019 | requirement = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement" | ||
4020 | # The Element is a single source file or a collection of source files. | ||
4021 | source = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source" | ||
4022 | # The Element is a plan, guideline, or strategy for how to create, perform, or analyze an application. | ||
4023 | specification = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification" | ||
4024 | # The Element is a test used to verify functionality on a software element. | ||
4025 | test = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test" | ||
4026 | |||
4027 | |||
4028 | # Class that describes a build instance of software/artifacts. | ||
4029 | @register("https://spdx.org/rdf/3.0.1/terms/Build/Build", compact_type="build_Build", abstract=False) | ||
4030 | class build_Build(Element): | ||
4031 | NODE_KIND = NodeKind.IRI | ||
4032 | ID_ALIAS = "spdxId" | ||
4033 | NAMED_INDIVIDUALS = { | ||
4034 | } | ||
4035 | |||
4036 | @classmethod | ||
4037 | def _register_props(cls): | ||
4038 | super()._register_props() | ||
4039 | # Property that describes the time at which a build stops. | ||
4040 | cls._add_property( | ||
4041 | "build_buildEndTime", | ||
4042 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4043 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildEndTime", | ||
4044 | compact="build_buildEndTime", | ||
4045 | ) | ||
4046 | # A buildId is a locally unique identifier used by a builder to identify a unique | ||
4047 | # instance of a build produced by it. | ||
4048 | cls._add_property( | ||
4049 | "build_buildId", | ||
4050 | StringProp(), | ||
4051 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildId", | ||
4052 | compact="build_buildId", | ||
4053 | ) | ||
4054 | # Property describing the start time of a build. | ||
4055 | cls._add_property( | ||
4056 | "build_buildStartTime", | ||
4057 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4058 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildStartTime", | ||
4059 | compact="build_buildStartTime", | ||
4060 | ) | ||
4061 | # A buildType is a hint that is used to indicate the toolchain, platform, or | ||
4062 | # infrastructure that the build was invoked on. | ||
4063 | cls._add_property( | ||
4064 | "build_buildType", | ||
4065 | AnyURIProp(), | ||
4066 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildType", | ||
4067 | min_count=1, | ||
4068 | compact="build_buildType", | ||
4069 | ) | ||
4070 | # Property that describes the digest of the build configuration file used to | ||
4071 | # invoke a build. | ||
4072 | cls._add_property( | ||
4073 | "build_configSourceDigest", | ||
4074 | ListProp(ObjectProp(Hash, False)), | ||
4075 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceDigest", | ||
4076 | compact="build_configSourceDigest", | ||
4077 | ) | ||
4078 | # Property that describes the invocation entrypoint of a build. | ||
4079 | cls._add_property( | ||
4080 | "build_configSourceEntrypoint", | ||
4081 | ListProp(StringProp()), | ||
4082 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceEntrypoint", | ||
4083 | compact="build_configSourceEntrypoint", | ||
4084 | ) | ||
4085 | # Property that describes the URI of the build configuration source file. | ||
4086 | cls._add_property( | ||
4087 | "build_configSourceUri", | ||
4088 | ListProp(AnyURIProp()), | ||
4089 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceUri", | ||
4090 | compact="build_configSourceUri", | ||
4091 | ) | ||
4092 | # Property describing the session in which a build is invoked. | ||
4093 | cls._add_property( | ||
4094 | "build_environment", | ||
4095 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
4096 | iri="https://spdx.org/rdf/3.0.1/terms/Build/environment", | ||
4097 | compact="build_environment", | ||
4098 | ) | ||
4099 | # Property describing a parameter used in an instance of a build. | ||
4100 | cls._add_property( | ||
4101 | "build_parameter", | ||
4102 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
4103 | iri="https://spdx.org/rdf/3.0.1/terms/Build/parameter", | ||
4104 | compact="build_parameter", | ||
4105 | ) | ||
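
# Editor's sketch of a minimal build_Build record, assuming this module is
# importable as oe.spdx30 and that DateTimeStampProp accepts timezone-aware
# datetime objects (serialized to the "...Z" pattern above). The buildType
# URI is a placeholder; spdxId and creationInfo, required on every Element,
# are omitted here and must be filled in before serialization.
from datetime import datetime, timezone

from oe import spdx30

b = spdx30.build_Build(
    build_buildType="https://example.org/bitbake-build",  # required (min_count=1)
    build_buildId="core-image-minimal-20240301T120000Z",
    build_buildStartTime=datetime(2024, 3, 1, 12, 0, 0, tzinfo=timezone.utc),
    build_buildEndTime=datetime(2024, 3, 1, 12, 30, 0, tzinfo=timezone.utc),
)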
4106 | |||
4107 | |||
4108 | # Agent represents anything with the potential to act on a system. | ||
4109 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Agent", compact_type="Agent", abstract=False) | ||
4110 | class Agent(Element): | ||
4111 | NODE_KIND = NodeKind.IRI | ||
4112 | ID_ALIAS = "spdxId" | ||
4113 | NAMED_INDIVIDUALS = { | ||
4114 | } | ||
4115 | |||
4116 | |||
4117 | # An assertion made in relation to one or more elements. | ||
4118 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Annotation", compact_type="Annotation", abstract=False) | ||
4119 | class Annotation(Element): | ||
4120 | NODE_KIND = NodeKind.IRI | ||
4121 | ID_ALIAS = "spdxId" | ||
4122 | NAMED_INDIVIDUALS = { | ||
4123 | } | ||
4124 | |||
4125 | @classmethod | ||
4126 | def _register_props(cls): | ||
4127 | super()._register_props() | ||
4128 | # Describes the type of annotation. | ||
4129 | cls._add_property( | ||
4130 | "annotationType", | ||
4131 | EnumProp([ | ||
4132 | ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", "other"), | ||
4133 | ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", "review"), | ||
4134 | ]), | ||
4135 | iri="https://spdx.org/rdf/3.0.1/terms/Core/annotationType", | ||
4136 | min_count=1, | ||
4137 | compact="annotationType", | ||
4138 | ) | ||
4139 | # Provides information about the content type of an Element or a Property. | ||
4140 | cls._add_property( | ||
4141 | "contentType", | ||
4142 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
4143 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
4144 | compact="contentType", | ||
4145 | ) | ||
4146 | # Commentary on an assertion that an annotator has made. | ||
4147 | cls._add_property( | ||
4148 | "statement", | ||
4149 | StringProp(), | ||
4150 | iri="https://spdx.org/rdf/3.0.1/terms/Core/statement", | ||
4151 | compact="statement", | ||
4152 | ) | ||
4153 | # An Element an annotator has made an assertion about. | ||
4154 | cls._add_property( | ||
4155 | "subject", | ||
4156 | ObjectProp(Element, True, context=[ | ||
4157 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
4158 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
4159 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
4160 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
4161 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
4162 | ],), | ||
4163 | iri="https://spdx.org/rdf/3.0.1/terms/Core/subject", | ||
4164 | min_count=1, | ||
4165 | compact="subject", | ||
4166 | ) | ||
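
# Editor's sketch: annotating an Element with a review comment. Assumes the
# AnnotationType enum individuals class generated earlier in this file and
# the software_Package class defined elsewhere; annotationType and subject
# are the two required properties (min_count=1 above).
from oe import spdx30

pkg = spdx30.software_Package(name="busybox")  # the Element under review

note = spdx30.Annotation(
    annotationType=spdx30.AnnotationType.review,
    subject=pkg,
    statement="Licensing fields verified against upstream 1.36.1.",
    contentType="text/plain",  # must match the type/subtype pattern above
)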
4167 | |||
4168 | |||
4169 | # A distinct article or unit within the digital domain. | ||
4170 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Artifact", compact_type="Artifact", abstract=True) | ||
4171 | class Artifact(Element): | ||
4172 | NODE_KIND = NodeKind.IRI | ||
4173 | ID_ALIAS = "spdxId" | ||
4174 | NAMED_INDIVIDUALS = { | ||
4175 | } | ||
4176 | |||
4177 | @classmethod | ||
4178 | def _register_props(cls): | ||
4179 | super()._register_props() | ||
4180 | # Specifies the time an artifact was built. | ||
4181 | cls._add_property( | ||
4182 | "builtTime", | ||
4183 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4184 | iri="https://spdx.org/rdf/3.0.1/terms/Core/builtTime", | ||
4185 | compact="builtTime", | ||
4186 | ) | ||
4187 | # Identifies from where or whom the Element originally came. | ||
4188 | cls._add_property( | ||
4189 | "originatedBy", | ||
4190 | ListProp(ObjectProp(Agent, False, context=[ | ||
4191 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
4192 | ],)), | ||
4193 | iri="https://spdx.org/rdf/3.0.1/terms/Core/originatedBy", | ||
4194 | compact="originatedBy", | ||
4195 | ) | ||
4196 | # Specifies the time an artifact was released. | ||
4197 | cls._add_property( | ||
4198 | "releaseTime", | ||
4199 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4200 | iri="https://spdx.org/rdf/3.0.1/terms/Core/releaseTime", | ||
4201 | compact="releaseTime", | ||
4202 | ) | ||
4203 | # The name of a relevant standard that may apply to an artifact. | ||
4204 | cls._add_property( | ||
4205 | "standardName", | ||
4206 | ListProp(StringProp()), | ||
4207 | iri="https://spdx.org/rdf/3.0.1/terms/Core/standardName", | ||
4208 | compact="standardName", | ||
4209 | ) | ||
4210 | # Identifies who or what supplied the artifact or VulnAssessmentRelationship | ||
4211 | # referenced by the Element. | ||
4212 | cls._add_property( | ||
4213 | "suppliedBy", | ||
4214 | ObjectProp(Agent, False, context=[ | ||
4215 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
4216 | ],), | ||
4217 | iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy", | ||
4218 | compact="suppliedBy", | ||
4219 | ) | ||
4220 | # Specifies the level of support associated with an artifact. | ||
4221 | cls._add_property( | ||
4222 | "supportLevel", | ||
4223 | ListProp(EnumProp([ | ||
4224 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", "deployed"), | ||
4225 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", "development"), | ||
4226 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", "endOfSupport"), | ||
4227 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", "limitedSupport"), | ||
4228 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", "noAssertion"), | ||
4229 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", "noSupport"), | ||
4230 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", "support"), | ||
4231 | ])), | ||
4232 | iri="https://spdx.org/rdf/3.0.1/terms/Core/supportLevel", | ||
4233 | compact="supportLevel", | ||
4234 | ) | ||
4235 | # Specifies until when the artifact can be used before its usage needs to be | ||
4236 | # reassessed. | ||
4237 | cls._add_property( | ||
4238 | "validUntilTime", | ||
4239 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4240 | iri="https://spdx.org/rdf/3.0.1/terms/Core/validUntilTime", | ||
4241 | compact="validUntilTime", | ||
4242 | ) | ||
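
# Editor's sketch of the Artifact provenance fields on a concrete subclass
# (software_Package, defined elsewhere in this file). SupportType is the
# assumed name of the enum individuals class behind supportLevel; the
# supplier is a placeholder.
from oe import spdx30

vendor = spdx30.Organization(name="Example Corp")

pkg = spdx30.software_Package(name="example-lib")
pkg.suppliedBy = vendor
pkg.originatedBy = [vendor]
pkg.supportLevel = [spdx30.SupportType.limitedSupport]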
4243 | |||
4244 | |||
4245 | # A collection of Elements that have a shared context. | ||
4246 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Bundle", compact_type="Bundle", abstract=False) | ||
4247 | class Bundle(ElementCollection): | ||
4248 | NODE_KIND = NodeKind.IRI | ||
4249 | ID_ALIAS = "spdxId" | ||
4250 | NAMED_INDIVIDUALS = { | ||
4251 | } | ||
4252 | |||
4253 | @classmethod | ||
4254 | def _register_props(cls): | ||
4255 | super()._register_props() | ||
4256 | # Gives information about the circumstances or unifying properties | ||
4257 | # that Elements of the bundle have been assembled under. | ||
4258 | cls._add_property( | ||
4259 | "context", | ||
4260 | StringProp(), | ||
4261 | iri="https://spdx.org/rdf/3.0.1/terms/Core/context", | ||
4262 | compact="context", | ||
4263 | ) | ||
4264 | |||
4265 | |||
4266 | # A mathematically calculated representation of a grouping of data. | ||
4267 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Hash", compact_type="Hash", abstract=False) | ||
4268 | class Hash(IntegrityMethod): | ||
4269 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
4270 | NAMED_INDIVIDUALS = { | ||
4271 | } | ||
4272 | |||
4273 | @classmethod | ||
4274 | def _register_props(cls): | ||
4275 | super()._register_props() | ||
4276 | # Specifies the algorithm used for calculating the hash value. | ||
4277 | cls._add_property( | ||
4278 | "algorithm", | ||
4279 | EnumProp([ | ||
4280 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"), | ||
4281 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"), | ||
4282 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"), | ||
4283 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"), | ||
4284 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"), | ||
4285 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"), | ||
4286 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"), | ||
4287 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"), | ||
4288 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"), | ||
4289 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"), | ||
4290 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"), | ||
4291 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"), | ||
4292 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"), | ||
4293 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"), | ||
4294 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"), | ||
4295 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"), | ||
4296 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"), | ||
4297 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"), | ||
4298 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"), | ||
4299 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"), | ||
4300 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"), | ||
4301 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"), | ||
4302 | ]), | ||
4303 | iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm", | ||
4304 | min_count=1, | ||
4305 | compact="algorithm", | ||
4306 | ) | ||
4307 | # The result of applying a hash algorithm to an Element. | ||
4308 | cls._add_property( | ||
4309 | "hashValue", | ||
4310 | StringProp(), | ||
4311 | iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue", | ||
4312 | min_count=1, | ||
4313 | compact="hashValue", | ||
4314 | ) | ||
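
# Editor's sketch: computing a SHA-256 digest with the standard library and
# recording it with the enum above. Assumes this module is importable as
# oe.spdx30 and that the HashAlgorithm individuals class is generated
# earlier in this file; the file path is a placeholder.
import hashlib

from oe import spdx30

with open("core-image-minimal.rootfs.tar.zst", "rb") as f:  # placeholder path
    digest = hashlib.sha256(f.read()).hexdigest()

h = spdx30.Hash(
    algorithm=spdx30.HashAlgorithm.sha256,  # required
    hashValue=digest,                       # required
)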
4315 | |||
4316 | |||
4317 | # Provide context for a relationship that occurs in the lifecycle. | ||
4318 | @register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False) | ||
4319 | class LifecycleScopedRelationship(Relationship): | ||
4320 | NODE_KIND = NodeKind.IRI | ||
4321 | ID_ALIAS = "spdxId" | ||
4322 | NAMED_INDIVIDUALS = { | ||
4323 | } | ||
4324 | |||
4325 | @classmethod | ||
4326 | def _register_props(cls): | ||
4327 | super()._register_props() | ||
4328 | # Captures the scope of information about a specific relationship between elements. | ||
4329 | cls._add_property( | ||
4330 | "scope", | ||
4331 | EnumProp([ | ||
4332 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", "build"), | ||
4333 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", "design"), | ||
4334 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", "development"), | ||
4335 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", "other"), | ||
4336 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", "runtime"), | ||
4337 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", "test"), | ||
4338 | ]), | ||
4339 | iri="https://spdx.org/rdf/3.0.1/terms/Core/scope", | ||
4340 | compact="scope", | ||
4341 | ) | ||
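
# Editor's sketch: scoping a dependency to the build lifecycle stage.
# Assumes the Relationship base properties (from_, relationshipType, to)
# and the RelationshipType / LifecycleScopeType enum classes generated
# earlier in this file; the package names are placeholders.
from oe import spdx30

app = spdx30.software_Package(name="example-app")
toolchain = spdx30.software_Package(name="gcc-cross")

rel = spdx30.LifecycleScopedRelationship(
    from_=app,
    relationshipType=spdx30.RelationshipType.dependsOn,
    to=[toolchain],
    scope=spdx30.LifecycleScopeType.build,  # a build-time-only dependency
)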
4342 | |||
4343 | |||
4344 | # A group of people who work together in an organized way for a shared purpose. | ||
4345 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Organization", compact_type="Organization", abstract=False) | ||
4346 | class Organization(Agent): | ||
4347 | NODE_KIND = NodeKind.IRI | ||
4348 | ID_ALIAS = "spdxId" | ||
4349 | NAMED_INDIVIDUALS = { | ||
4350 | "SpdxOrganization": "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", | ||
4351 | } | ||
4352 | # An Organization representing the SPDX Project. | ||
4353 | SpdxOrganization = "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization" | ||
4354 | |||
4355 | |||
4356 | # An individual human being. | ||
4357 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Person", compact_type="Person", abstract=False) | ||
4358 | class Person(Agent): | ||
4359 | NODE_KIND = NodeKind.IRI | ||
4360 | ID_ALIAS = "spdxId" | ||
4361 | NAMED_INDIVIDUALS = { | ||
4362 | } | ||
4363 | |||
4364 | |||
4365 | # A software agent. | ||
4366 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False) | ||
4367 | class SoftwareAgent(Agent): | ||
4368 | NODE_KIND = NodeKind.IRI | ||
4369 | ID_ALIAS = "spdxId" | ||
4370 | NAMED_INDIVIDUALS = { | ||
4371 | } | ||
4372 | |||
4373 | |||
4374 | # Portion of an AnyLicenseInfo representing a set of licensing information | ||
4375 | # where all elements apply. | ||
4376 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False) | ||
4377 | class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo): | ||
4378 | NODE_KIND = NodeKind.IRI | ||
4379 | ID_ALIAS = "spdxId" | ||
4380 | NAMED_INDIVIDUALS = { | ||
4381 | } | ||
4382 | |||
4383 | @classmethod | ||
4384 | def _register_props(cls): | ||
4385 | super()._register_props() | ||
4386 | # A license expression participating in a license set. | ||
4387 | cls._add_property( | ||
4388 | "expandedlicensing_member", | ||
4389 | ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
4390 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
4391 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
4392 | ],)), | ||
4393 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member", | ||
4394 | min_count=2, | ||
4395 | compact="expandedlicensing_member", | ||
4396 | ) | ||
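
# Editor's sketch: an AND of two licenses (the set requires at least two
# members, min_count=2 above). The ListedLicense construction is
# illustrative only; real SPDX License List entries carry their full
# license text rather than the "..." placeholders used here.
from oe import spdx30

mit = spdx30.expandedlicensing_ListedLicense(
    spdxId="http://spdx.org/licenses/MIT",
    name="MIT License",
    simplelicensing_licenseText="...",  # full text elided
)
gpl2 = spdx30.expandedlicensing_ListedLicense(
    spdxId="http://spdx.org/licenses/GPL-2.0-only",
    name="GNU General Public License v2.0 only",
    simplelicensing_licenseText="...",
)

both = spdx30.expandedlicensing_ConjunctiveLicenseSet(
    expandedlicensing_member=[mit, gpl2],  # i.e. "MIT AND GPL-2.0-only"
)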
4397 | |||
4398 | |||
4399 | # A license addition that is not listed on the SPDX Exceptions List. | ||
4400 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False) | ||
4401 | class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition): | ||
4402 | NODE_KIND = NodeKind.IRI | ||
4403 | ID_ALIAS = "spdxId" | ||
4404 | NAMED_INDIVIDUALS = { | ||
4405 | } | ||
4406 | |||
4407 | |||
4408 | # Portion of an AnyLicenseInfo representing a set of licensing information where | ||
4409 | # only one of the elements applies. | ||
4410 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False) | ||
4411 | class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo): | ||
4412 | NODE_KIND = NodeKind.IRI | ||
4413 | ID_ALIAS = "spdxId" | ||
4414 | NAMED_INDIVIDUALS = { | ||
4415 | } | ||
4416 | |||
4417 | @classmethod | ||
4418 | def _register_props(cls): | ||
4419 | super()._register_props() | ||
4420 | # A license expression participating in a license set. | ||
4421 | cls._add_property( | ||
4422 | "expandedlicensing_member", | ||
4423 | ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
4424 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
4425 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
4426 | ],)), | ||
4427 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member", | ||
4428 | min_count=2, | ||
4429 | compact="expandedlicensing_member", | ||
4430 | ) | ||
4431 | |||
4432 | |||
4433 | # Abstract class representing a License or an OrLaterOperator. | ||
4434 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True) | ||
4435 | class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo): | ||
4436 | NODE_KIND = NodeKind.IRI | ||
4437 | ID_ALIAS = "spdxId" | ||
4438 | NAMED_INDIVIDUALS = { | ||
4439 | } | ||
4440 | |||
4441 | |||
4442 | # A concrete subclass of AnyLicenseInfo used by Individuals in the | ||
4443 | # ExpandedLicensing profile. | ||
4444 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False) | ||
4445 | class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo): | ||
4446 | NODE_KIND = NodeKind.IRI | ||
4447 | ID_ALIAS = "spdxId" | ||
4448 | NAMED_INDIVIDUALS = { | ||
4449 | "NoAssertionLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", | ||
4450 | "NoneLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", | ||
4451 | } | ||
4452 | # An Individual Value for License when no assertion can be made about its actual | ||
4453 | # value. | ||
4454 | NoAssertionLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense" | ||
4455 | # An Individual Value for License where the SPDX data creator determines that no | ||
4456 | # license is present. | ||
4457 | NoneLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense" | ||
4458 | |||
4459 | |||
4460 | # Abstract class for the portion of an AnyLicenseInfo representing a license. | ||
4461 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True) | ||
4462 | class expandedlicensing_License(expandedlicensing_ExtendableLicense): | ||
4463 | NODE_KIND = NodeKind.IRI | ||
4464 | ID_ALIAS = "spdxId" | ||
4465 | NAMED_INDIVIDUALS = { | ||
4466 | } | ||
4467 | |||
4468 | @classmethod | ||
4469 | def _register_props(cls): | ||
4470 | super()._register_props() | ||
4471 | # Specifies whether a license or additional text identifier has been marked as | ||
4472 | # deprecated. | ||
4473 | cls._add_property( | ||
4474 | "expandedlicensing_isDeprecatedLicenseId", | ||
4475 | BooleanProp(), | ||
4476 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedLicenseId", | ||
4477 | compact="expandedlicensing_isDeprecatedLicenseId", | ||
4478 | ) | ||
4479 | # Specifies whether the License is listed as free by the | ||
4480 | # Free Software Foundation (FSF). | ||
4481 | cls._add_property( | ||
4482 | "expandedlicensing_isFsfLibre", | ||
4483 | BooleanProp(), | ||
4484 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isFsfLibre", | ||
4485 | compact="expandedlicensing_isFsfLibre", | ||
4486 | ) | ||
4487 | # Specifies whether the License is listed as approved by the | ||
4488 | # Open Source Initiative (OSI). | ||
4489 | cls._add_property( | ||
4490 | "expandedlicensing_isOsiApproved", | ||
4491 | BooleanProp(), | ||
4492 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isOsiApproved", | ||
4493 | compact="expandedlicensing_isOsiApproved", | ||
4494 | ) | ||
4495 | # Identifies all the text and metadata associated with a license in the license | ||
4496 | # XML format. | ||
4497 | cls._add_property( | ||
4498 | "expandedlicensing_licenseXml", | ||
4499 | StringProp(), | ||
4500 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml", | ||
4501 | compact="expandedlicensing_licenseXml", | ||
4502 | ) | ||
4503 | # Specifies the licenseId that is preferred to be used in place of a deprecated | ||
4504 | # License or LicenseAddition. | ||
4505 | cls._add_property( | ||
4506 | "expandedlicensing_obsoletedBy", | ||
4507 | StringProp(), | ||
4508 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy", | ||
4509 | compact="expandedlicensing_obsoletedBy", | ||
4510 | ) | ||
4511 | # Contains a URL where the License or LicenseAddition can be found in use. | ||
4512 | cls._add_property( | ||
4513 | "expandedlicensing_seeAlso", | ||
4514 | ListProp(AnyURIProp()), | ||
4515 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso", | ||
4516 | compact="expandedlicensing_seeAlso", | ||
4517 | ) | ||
4518 | # Provides a License author's preferred text to indicate that a file is covered | ||
4519 | # by the License. | ||
4520 | cls._add_property( | ||
4521 | "expandedlicensing_standardLicenseHeader", | ||
4522 | StringProp(), | ||
4523 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseHeader", | ||
4524 | compact="expandedlicensing_standardLicenseHeader", | ||
4525 | ) | ||
4526 | # Identifies the full text of a License, in SPDX templating format. | ||
4527 | cls._add_property( | ||
4528 | "expandedlicensing_standardLicenseTemplate", | ||
4529 | StringProp(), | ||
4530 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseTemplate", | ||
4531 | compact="expandedlicensing_standardLicenseTemplate", | ||
4532 | ) | ||
4533 | # Identifies the full text of a License or Addition. | ||
4534 | cls._add_property( | ||
4535 | "simplelicensing_licenseText", | ||
4536 | StringProp(), | ||
4537 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText", | ||
4538 | min_count=1, | ||
4539 | compact="simplelicensing_licenseText", | ||
4540 | ) | ||
4541 | |||
4542 | |||
4543 | # A license that is listed on the SPDX License List. | ||
4544 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False) | ||
4545 | class expandedlicensing_ListedLicense(expandedlicensing_License): | ||
4546 | NODE_KIND = NodeKind.IRI | ||
4547 | ID_ALIAS = "spdxId" | ||
4548 | NAMED_INDIVIDUALS = { | ||
4549 | } | ||
4550 | |||
4551 | @classmethod | ||
4552 | def _register_props(cls): | ||
4553 | super()._register_props() | ||
4554 | # Specifies the SPDX License List version in which this license or exception | ||
4555 | # identifier was deprecated. | ||
4556 | cls._add_property( | ||
4557 | "expandedlicensing_deprecatedVersion", | ||
4558 | StringProp(), | ||
4559 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion", | ||
4560 | compact="expandedlicensing_deprecatedVersion", | ||
4561 | ) | ||
4562 | # Specifies the SPDX License List version in which this ListedLicense or | ||
4563 | # ListedLicenseException identifier was first added. | ||
4564 | cls._add_property( | ||
4565 | "expandedlicensing_listVersionAdded", | ||
4566 | StringProp(), | ||
4567 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded", | ||
4568 | compact="expandedlicensing_listVersionAdded", | ||
4569 | ) | ||
4570 | |||
4571 | |||
4572 | # Portion of an AnyLicenseInfo representing this version, or any later version, | ||
4573 | # of the indicated License. | ||
4574 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False) | ||
4575 | class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense): | ||
4576 | NODE_KIND = NodeKind.IRI | ||
4577 | ID_ALIAS = "spdxId" | ||
4578 | NAMED_INDIVIDUALS = { | ||
4579 | } | ||
4580 | |||
4581 | @classmethod | ||
4582 | def _register_props(cls): | ||
4583 | super()._register_props() | ||
4584 | # A License participating in an 'or later' model. | ||
4585 | cls._add_property( | ||
4586 | "expandedlicensing_subjectLicense", | ||
4587 | ObjectProp(expandedlicensing_License, True), | ||
4588 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectLicense", | ||
4589 | min_count=1, | ||
4590 | compact="expandedlicensing_subjectLicense", | ||
4591 | ) | ||
4592 | |||
4593 | |||
4594 | # Portion of an AnyLicenseInfo representing a License which has additional | ||
4595 | # text applied to it. | ||
4596 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False) | ||
4597 | class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo): | ||
4598 | NODE_KIND = NodeKind.IRI | ||
4599 | ID_ALIAS = "spdxId" | ||
4600 | NAMED_INDIVIDUALS = { | ||
4601 | } | ||
4602 | |||
4603 | @classmethod | ||
4604 | def _register_props(cls): | ||
4605 | super()._register_props() | ||
4606 | # A LicenseAddition participating in a 'with addition' model. | ||
4607 | cls._add_property( | ||
4608 | "expandedlicensing_subjectAddition", | ||
4609 | ObjectProp(expandedlicensing_LicenseAddition, True), | ||
4610 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectAddition", | ||
4611 | min_count=1, | ||
4612 | compact="expandedlicensing_subjectAddition", | ||
4613 | ) | ||
4614 | # A License participating in a 'with addition' model. | ||
4615 | cls._add_property( | ||
4616 | "expandedlicensing_subjectExtendableLicense", | ||
4617 | ObjectProp(expandedlicensing_ExtendableLicense, True), | ||
4618 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectExtendableLicense", | ||
4619 | min_count=1, | ||
4620 | compact="expandedlicensing_subjectExtendableLicense", | ||
4621 | ) | ||
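
# Editor's sketch composing the two operators above into the equivalent of
# "GPL-2.0-or-later WITH Classpath-exception-2.0". It assumes
# expandedlicensing_ListedLicenseException (a LicenseAddition subclass
# defined elsewhere in this file) and its expandedlicensing_additionText
# property; license and addition texts are elided with placeholders.
from oe import spdx30

gpl2 = spdx30.expandedlicensing_ListedLicense(
    spdxId="http://spdx.org/licenses/GPL-2.0-only",
    simplelicensing_licenseText="...",
)
or_later = spdx30.expandedlicensing_OrLaterOperator(
    expandedlicensing_subjectLicense=gpl2,
)
classpath = spdx30.expandedlicensing_ListedLicenseException(
    spdxId="http://spdx.org/licenses/Classpath-exception-2.0",
    expandedlicensing_additionText="...",
)
combined = spdx30.expandedlicensing_WithAdditionOperator(
    expandedlicensing_subjectExtendableLicense=or_later,
    expandedlicensing_subjectAddition=classpath,
)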
4622 | |||
4623 | |||
4624 | # A type of extension consisting of a list of name value pairs. | ||
4625 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False) | ||
4626 | class extension_CdxPropertiesExtension(extension_Extension): | ||
4627 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
4628 | NAMED_INDIVIDUALS = { | ||
4629 | } | ||
4630 | |||
4631 | @classmethod | ||
4632 | def _register_props(cls): | ||
4633 | super()._register_props() | ||
4634 | # Provides a map of property names to values. | ||
4635 | cls._add_property( | ||
4636 | "extension_cdxProperty", | ||
4637 | ListProp(ObjectProp(extension_CdxPropertyEntry, False)), | ||
4638 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxProperty", | ||
4639 | min_count=1, | ||
4640 | compact="extension_cdxProperty", | ||
4641 | ) | ||
4642 | |||
4643 | |||
4644 | # Provides a CVSS version 2.0 assessment for a vulnerability. | ||
4645 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False) | ||
4646 | class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4647 | NODE_KIND = NodeKind.IRI | ||
4648 | ID_ALIAS = "spdxId" | ||
4649 | NAMED_INDIVIDUALS = { | ||
4650 | } | ||
4651 | |||
4652 | @classmethod | ||
4653 | def _register_props(cls): | ||
4654 | super()._register_props() | ||
4655 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
4656 | cls._add_property( | ||
4657 | "security_score", | ||
4658 | FloatProp(), | ||
4659 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
4660 | min_count=1, | ||
4661 | compact="security_score", | ||
4662 | ) | ||
4663 | # Specifies the CVSS vector string for a vulnerability. | ||
4664 | cls._add_property( | ||
4665 | "security_vectorString", | ||
4666 | StringProp(), | ||
4667 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
4668 | min_count=1, | ||
4669 | compact="security_vectorString", | ||
4670 | ) | ||
4671 | |||
4672 | |||
4673 | # Provides a CVSS version 3 assessment for a vulnerability. | ||
4674 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False) | ||
4675 | class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4676 | NODE_KIND = NodeKind.IRI | ||
4677 | ID_ALIAS = "spdxId" | ||
4678 | NAMED_INDIVIDUALS = { | ||
4679 | } | ||
4680 | |||
4681 | @classmethod | ||
4682 | def _register_props(cls): | ||
4683 | super()._register_props() | ||
4684 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
4685 | cls._add_property( | ||
4686 | "security_score", | ||
4687 | FloatProp(), | ||
4688 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
4689 | min_count=1, | ||
4690 | compact="security_score", | ||
4691 | ) | ||
4692 | # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software. | ||
4693 | cls._add_property( | ||
4694 | "security_severity", | ||
4695 | EnumProp([ | ||
4696 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"), | ||
4697 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"), | ||
4698 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"), | ||
4699 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"), | ||
4700 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"), | ||
4701 | ]), | ||
4702 | iri="https://spdx.org/rdf/3.0.1/terms/Security/severity", | ||
4703 | min_count=1, | ||
4704 | compact="security_severity", | ||
4705 | ) | ||
4706 | # Specifies the CVSS vector string for a vulnerability. | ||
4707 | cls._add_property( | ||
4708 | "security_vectorString", | ||
4709 | StringProp(), | ||
4710 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
4711 | min_count=1, | ||
4712 | compact="security_vectorString", | ||
4713 | ) | ||
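
# Editor's sketch: a CVSS v3.1 assessment linking a vulnerability to the
# software it scores. Assumes RelationshipType.hasAssessmentFor from the
# Relationship base class and the security_CvssSeverityType individuals
# class; score, severity, and vector are real Heartbleed values and must
# stay consistent with one another.
from oe import spdx30

vuln = spdx30.security_Vulnerability(name="CVE-2014-0160")
pkg = spdx30.software_Package(name="openssl")

assessment = spdx30.security_CvssV3VulnAssessmentRelationship(
    from_=vuln,
    relationshipType=spdx30.RelationshipType.hasAssessmentFor,
    to=[pkg],
    security_score=7.5,
    security_severity=spdx30.security_CvssSeverityType.high,
    security_vectorString="CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
)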
4714 | |||
4715 | |||
4716 | # Provides a CVSS version 4 assessment for a vulnerability. | ||
4717 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False) | ||
4718 | class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4719 | NODE_KIND = NodeKind.IRI | ||
4720 | ID_ALIAS = "spdxId" | ||
4721 | NAMED_INDIVIDUALS = { | ||
4722 | } | ||
4723 | |||
4724 | @classmethod | ||
4725 | def _register_props(cls): | ||
4726 | super()._register_props() | ||
4727 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
4728 | cls._add_property( | ||
4729 | "security_score", | ||
4730 | FloatProp(), | ||
4731 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
4732 | min_count=1, | ||
4733 | compact="security_score", | ||
4734 | ) | ||
4735 | # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software. | ||
4736 | cls._add_property( | ||
4737 | "security_severity", | ||
4738 | EnumProp([ | ||
4739 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"), | ||
4740 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"), | ||
4741 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"), | ||
4742 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"), | ||
4743 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"), | ||
4744 | ]), | ||
4745 | iri="https://spdx.org/rdf/3.0.1/terms/Security/severity", | ||
4746 | min_count=1, | ||
4747 | compact="security_severity", | ||
4748 | ) | ||
4749 | # Specifies the CVSS vector string for a vulnerability. | ||
4750 | cls._add_property( | ||
4751 | "security_vectorString", | ||
4752 | StringProp(), | ||
4753 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
4754 | min_count=1, | ||
4755 | compact="security_vectorString", | ||
4756 | ) | ||
4757 | |||
4758 | |||
4759 | # Provides an EPSS assessment for a vulnerability. | ||
4760 | @register("https://spdx.org/rdf/3.0.1/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False) | ||
4761 | class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4762 | NODE_KIND = NodeKind.IRI | ||
4763 | ID_ALIAS = "spdxId" | ||
4764 | NAMED_INDIVIDUALS = { | ||
4765 | } | ||
4766 | |||
4767 | @classmethod | ||
4768 | def _register_props(cls): | ||
4769 | super()._register_props() | ||
4770 | # The percentile of the current probability score. | ||
4771 | cls._add_property( | ||
4772 | "security_percentile", | ||
4773 | FloatProp(), | ||
4774 | iri="https://spdx.org/rdf/3.0.1/terms/Security/percentile", | ||
4775 | min_count=1, | ||
4776 | compact="security_percentile", | ||
4777 | ) | ||
4778 | # A probability score between 0 and 1 of a vulnerability being exploited. | ||
4779 | cls._add_property( | ||
4780 | "security_probability", | ||
4781 | FloatProp(), | ||
4782 | iri="https://spdx.org/rdf/3.0.1/terms/Security/probability", | ||
4783 | min_count=1, | ||
4784 | compact="security_probability", | ||
4785 | ) | ||
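
# Editor's sketch: recording an EPSS score for the same vulnerability.
# probability is a 0..1 likelihood of exploitation; percentile ranks that
# probability against all scored CVEs. The values are illustrative, and
# the same Relationship base assumptions as above apply.
from oe import spdx30

vuln = spdx30.security_Vulnerability(name="CVE-2014-0160")

epss = spdx30.security_EpssVulnAssessmentRelationship(
    from_=vuln,
    relationshipType=spdx30.RelationshipType.hasAssessmentFor,
    to=[spdx30.software_Package(name="openssl")],
    security_probability=0.97,  # required
    security_percentile=0.99,   # required
)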
4786 | |||
4787 | |||
4788 | # Provides an exploit assessment of a vulnerability. | ||
4789 | @register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False) | ||
4790 | class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4791 | NODE_KIND = NodeKind.IRI | ||
4792 | ID_ALIAS = "spdxId" | ||
4793 | NAMED_INDIVIDUALS = { | ||
4794 | } | ||
4795 | |||
4796 | @classmethod | ||
4797 | def _register_props(cls): | ||
4798 | super()._register_props() | ||
4799 | # Specifies the exploit catalog type. | ||
4800 | cls._add_property( | ||
4801 | "security_catalogType", | ||
4802 | EnumProp([ | ||
4803 | ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", "kev"), | ||
4804 | ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", "other"), | ||
4805 | ]), | ||
4806 | iri="https://spdx.org/rdf/3.0.1/terms/Security/catalogType", | ||
4807 | min_count=1, | ||
4808 | compact="security_catalogType", | ||
4809 | ) | ||
4810 | # Describes whether a CVE is known to have an exploit because it has been listed in an exploit catalog. | ||
4811 | cls._add_property( | ||
4812 | "security_exploited", | ||
4813 | BooleanProp(), | ||
4814 | iri="https://spdx.org/rdf/3.0.1/terms/Security/exploited", | ||
4815 | min_count=1, | ||
4816 | compact="security_exploited", | ||
4817 | ) | ||
4818 | # Provides the location of an exploit catalog. | ||
4819 | cls._add_property( | ||
4820 | "security_locator", | ||
4821 | AnyURIProp(), | ||
4822 | iri="https://spdx.org/rdf/3.0.1/terms/Security/locator", | ||
4823 | min_count=1, | ||
4824 | compact="security_locator", | ||
4825 | ) | ||
4826 | |||
4827 | |||
4828 | # Provides an SSVC assessment for a vulnerability. | ||
4829 | @register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False) | ||
4830 | class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4831 | NODE_KIND = NodeKind.IRI | ||
4832 | ID_ALIAS = "spdxId" | ||
4833 | NAMED_INDIVIDUALS = { | ||
4834 | } | ||
4835 | |||
4836 | @classmethod | ||
4837 | def _register_props(cls): | ||
4838 | super()._register_props() | ||
4839 | # Provides the enumeration of possible decisions in the | ||
4840 | # [Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree](https://www.cisa.gov/stakeholder-specific-vulnerability-categorization-ssvc). | ||
4841 | cls._add_property( | ||
4842 | "security_decisionType", | ||
4843 | EnumProp([ | ||
4844 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", "act"), | ||
4845 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", "attend"), | ||
4846 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", "track"), | ||
4847 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", "trackStar"), | ||
4848 | ]), | ||
4849 | iri="https://spdx.org/rdf/3.0.1/terms/Security/decisionType", | ||
4850 | min_count=1, | ||
4851 | compact="security_decisionType", | ||
4852 | ) | ||
4853 | |||
4854 | |||
4855 | # Abstract ancestor class for all VEX relationships. | ||
4856 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True) | ||
4857 | class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4858 | NODE_KIND = NodeKind.IRI | ||
4859 | ID_ALIAS = "spdxId" | ||
4860 | NAMED_INDIVIDUALS = { | ||
4861 | } | ||
4862 | |||
4863 | @classmethod | ||
4864 | def _register_props(cls): | ||
4865 | super()._register_props() | ||
4866 | # Conveys information about how VEX status was determined. | ||
4867 | cls._add_property( | ||
4868 | "security_statusNotes", | ||
4869 | StringProp(), | ||
4870 | iri="https://spdx.org/rdf/3.0.1/terms/Security/statusNotes", | ||
4871 | compact="security_statusNotes", | ||
4872 | ) | ||
4873 | # Specifies the version of a VEX statement. | ||
4874 | cls._add_property( | ||
4875 | "security_vexVersion", | ||
4876 | StringProp(), | ||
4877 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vexVersion", | ||
4878 | compact="security_vexVersion", | ||
4879 | ) | ||
4880 | |||
4881 | |||
4882 | # Specifies a vulnerability and its associated information. | ||
4883 | @register("https://spdx.org/rdf/3.0.1/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False) | ||
4884 | class security_Vulnerability(Artifact): | ||
4885 | NODE_KIND = NodeKind.IRI | ||
4886 | ID_ALIAS = "spdxId" | ||
4887 | NAMED_INDIVIDUALS = { | ||
4888 | } | ||
4889 | |||
4890 | @classmethod | ||
4891 | def _register_props(cls): | ||
4892 | super()._register_props() | ||
4893 | # Specifies a time when a vulnerability assessment was modified. | ||
4894 | cls._add_property( | ||
4895 | "security_modifiedTime", | ||
4896 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4897 | iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime", | ||
4898 | compact="security_modifiedTime", | ||
4899 | ) | ||
4900 | # Specifies the time when a vulnerability was published. | ||
4901 | cls._add_property( | ||
4902 | "security_publishedTime", | ||
4903 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4904 | iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime", | ||
4905 | compact="security_publishedTime", | ||
4906 | ) | ||
4907 | # Specifies the time and date when a vulnerability was withdrawn. | ||
4908 | cls._add_property( | ||
4909 | "security_withdrawnTime", | ||
4910 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4911 | iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime", | ||
4912 | compact="security_withdrawnTime", | ||
4913 | ) | ||
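
# Editor's sketch: a vulnerability record carrying its CVE id and timeline.
# Assumes the ExternalIdentifier class and ExternalIdentifierType.cve
# individuals defined elsewhere in this file, and datetime handling as in
# the build_Build sketch above; the dates are the published/modified dates
# of CVE-2014-0160.
from datetime import datetime, timezone

from oe import spdx30

vuln = spdx30.security_Vulnerability(
    externalIdentifier=[
        spdx30.ExternalIdentifier(
            externalIdentifierType=spdx30.ExternalIdentifierType.cve,
            identifier="CVE-2014-0160",
        )
    ],
    security_publishedTime=datetime(2014, 4, 7, 0, 0, 0, tzinfo=timezone.utc),
    security_modifiedTime=datetime(2014, 4, 9, 0, 0, 0, tzinfo=timezone.utc),
)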
4914 | |||
4915 | |||
4916 | # A distinct article or unit related to Software. | ||
4917 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True) | ||
4918 | class software_SoftwareArtifact(Artifact): | ||
4919 | NODE_KIND = NodeKind.IRI | ||
4920 | ID_ALIAS = "spdxId" | ||
4921 | NAMED_INDIVIDUALS = { | ||
4922 | } | ||
4923 | |||
4924 | @classmethod | ||
4925 | def _register_props(cls): | ||
4926 | super()._register_props() | ||
4927 | # Provides additional purpose information of the software artifact. | ||
4928 | cls._add_property( | ||
4929 | "software_additionalPurpose", | ||
4930 | ListProp(EnumProp([ | ||
4931 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"), | ||
4932 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"), | ||
4933 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"), | ||
4934 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"), | ||
4935 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"), | ||
4936 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"), | ||
4937 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"), | ||
4938 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"), | ||
4939 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"), | ||
4940 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"), | ||
4941 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"), | ||
4942 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"), | ||
4943 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"), | ||
4944 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"), | ||
4945 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"), | ||
4946 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"), | ||
4947 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"), | ||
4948 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"), | ||
4949 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"), | ||
4950 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"), | ||
4951 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"), | ||
4952 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"), | ||
4953 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"), | ||
4954 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"), | ||
4955 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"), | ||
4956 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"), | ||
4957 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"), | ||
4958 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"), | ||
4959 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"), | ||
4960 | ])), | ||
4961 | iri="https://spdx.org/rdf/3.0.1/terms/Software/additionalPurpose", | ||
4962 | compact="software_additionalPurpose", | ||
4963 | ) | ||
4964 | # Provides a place for the SPDX data creator to record acknowledgement text for | ||
4965 | # a software Package, File or Snippet. | ||
4966 | cls._add_property( | ||
4967 | "software_attributionText", | ||
4968 | ListProp(StringProp()), | ||
4969 | iri="https://spdx.org/rdf/3.0.1/terms/Software/attributionText", | ||
4970 | compact="software_attributionText", | ||
4971 | ) | ||
4972 | # A canonical, unique, immutable identifier of the artifact content, that may be | ||
4973 | # used for verifying its identity and/or integrity. | ||
4974 | cls._add_property( | ||
4975 | "software_contentIdentifier", | ||
4976 | ListProp(ObjectProp(software_ContentIdentifier, False)), | ||
4977 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifier", | ||
4978 | compact="software_contentIdentifier", | ||
4979 | ) | ||
4980 | # Identifies the text of one or more copyright notices for a software Package, | ||
4981 | # File or Snippet, if any. | ||
4982 | cls._add_property( | ||
4983 | "software_copyrightText", | ||
4984 | StringProp(), | ||
4985 | iri="https://spdx.org/rdf/3.0.1/terms/Software/copyrightText", | ||
4986 | compact="software_copyrightText", | ||
4987 | ) | ||
4988 | # Provides information about the primary purpose of the software artifact. | ||
4989 | cls._add_property( | ||
4990 | "software_primaryPurpose", | ||
4991 | EnumProp([ | ||
4992 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"), | ||
4993 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"), | ||
4994 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"), | ||
4995 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"), | ||
4996 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"), | ||
4997 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"), | ||
4998 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"), | ||
4999 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"), | ||
5000 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"), | ||
5001 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"), | ||
5002 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"), | ||
5003 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"), | ||
5004 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"), | ||
5005 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"), | ||
5006 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"), | ||
5007 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"), | ||
5008 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"), | ||
5009 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"), | ||
5010 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"), | ||
5011 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"), | ||
5012 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"), | ||
5013 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"), | ||
5014 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"), | ||
5015 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"), | ||
5016 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"), | ||
5017 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"), | ||
5018 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"), | ||
5019 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"), | ||
5020 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"), | ||
5021 | ]), | ||
5022 | iri="https://spdx.org/rdf/3.0.1/terms/Software/primaryPurpose", | ||
5023 | compact="software_primaryPurpose", | ||
5024 | ) | ||
5025 | |||
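The SoftwarePurpose entries registered above become attributes of the generated software_SoftwarePurpose class, so consumers set purposes symbolically rather than by IRI. A minimal sketch, assuming the module is importable as oe.spdx30 (as it is elsewhere in this series) and using a made-up spdxId:

    import oe.spdx30

    pkg = oe.spdx30.software_Package(_id="http://example.com/pkg-1")
    pkg.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.library
    pkg.software_additionalPurpose = [
        oe.spdx30.software_SoftwarePurpose.source,
    ]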
5026 | |||
5027 | # A container for a grouping of SPDX-3.0 content characterizing details | ||
5028 | # (provenance, composition, licensing, etc.) about a product. | ||
5029 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Bom", compact_type="Bom", abstract=False) | ||
5030 | class Bom(Bundle): | ||
5031 | NODE_KIND = NodeKind.IRI | ||
5032 | ID_ALIAS = "spdxId" | ||
5033 | NAMED_INDIVIDUALS = { | ||
5034 | } | ||
5035 | |||
5036 | |||
5037 | # A license that is not listed on the SPDX License List. | ||
5038 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False) | ||
5039 | class expandedlicensing_CustomLicense(expandedlicensing_License): | ||
5040 | NODE_KIND = NodeKind.IRI | ||
5041 | ID_ALIAS = "spdxId" | ||
5042 | NAMED_INDIVIDUALS = { | ||
5043 | } | ||
5044 | |||
5045 | |||
5046 | # Connects a vulnerability and an element designating the element as a product | ||
5047 | # affected by the vulnerability. | ||
5048 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False) | ||
5049 | class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5050 | NODE_KIND = NodeKind.IRI | ||
5051 | ID_ALIAS = "spdxId" | ||
5052 | NAMED_INDIVIDUALS = { | ||
5053 | } | ||
5054 | |||
5055 | @classmethod | ||
5056 | def _register_props(cls): | ||
5057 | super()._register_props() | ||
5058 | # Provides advice on how to mitigate or remediate a vulnerability when a VEX product | ||
5059 | # is affected by it. | ||
5060 | cls._add_property( | ||
5061 | "security_actionStatement", | ||
5062 | StringProp(), | ||
5063 | iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatement", | ||
5064 | min_count=1, | ||
5065 | compact="security_actionStatement", | ||
5066 | ) | ||
5067 | # Records the time when a recommended action was communicated in a VEX statement | ||
5068 | # to mitigate a vulnerability. | ||
5069 | cls._add_property( | ||
5070 | "security_actionStatementTime", | ||
5071 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
5072 | iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatementTime", | ||
5073 | compact="security_actionStatementTime", | ||
5074 | ) | ||
5075 | |||
5076 | |||
5077 | # Links a vulnerability and elements representing products (in the VEX sense) where | ||
5078 | # a fix has been applied and are no longer affected. | ||
5079 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False) | ||
5080 | class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5081 | NODE_KIND = NodeKind.IRI | ||
5082 | ID_ALIAS = "spdxId" | ||
5083 | NAMED_INDIVIDUALS = { | ||
5084 | } | ||
5085 | |||
5086 | |||
5087 | # Links a vulnerability and one or more elements designating the latter as products | ||
5088 | # not affected by the vulnerability. | ||
5089 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False) | ||
5090 | class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5091 | NODE_KIND = NodeKind.IRI | ||
5092 | ID_ALIAS = "spdxId" | ||
5093 | NAMED_INDIVIDUALS = { | ||
5094 | } | ||
5095 | |||
5096 | @classmethod | ||
5097 | def _register_props(cls): | ||
5098 | super()._register_props() | ||
5099 | # Explains why a VEX product is not affected by a vulnerability. It is an | ||
5100 | # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable | ||
5101 | # justification label. | ||
5102 | cls._add_property( | ||
5103 | "security_impactStatement", | ||
5104 | StringProp(), | ||
5105 | iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatement", | ||
5106 | compact="security_impactStatement", | ||
5107 | ) | ||
5108 | # Timestamp of impact statement. | ||
5109 | cls._add_property( | ||
5110 | "security_impactStatementTime", | ||
5111 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
5112 | iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatementTime", | ||
5113 | compact="security_impactStatementTime", | ||
5114 | ) | ||
5115 | # Impact justification label to be used when linking a vulnerability to an element | ||
5116 | # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship | ||
5117 | # relationship. | ||
5118 | cls._add_property( | ||
5119 | "security_justificationType", | ||
5120 | EnumProp([ | ||
5121 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"), | ||
5122 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"), | ||
5123 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"), | ||
5124 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"), | ||
5125 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"), | ||
5126 | ]), | ||
5127 | iri="https://spdx.org/rdf/3.0.1/terms/Security/justificationType", | ||
5128 | compact="security_justificationType", | ||
5129 | ) | ||
5130 | |||
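A sketch of how this class is populated, mirroring the enum usage in oe/spdx30_tasks.py later in this diff (the _id and statement text are invented; a real relationship also needs from_/to endpoints). In that task code, the "Patched", "Unpatched" and "Ignored" CVE statuses map onto the VexFixed..., VexAffected... and VexNotAffected... relationship flavours respectively:

    rel = security_VexNotAffectedVulnAssessmentRelationship(
        _id="http://example.com/vex-not-affected-1",
    )
    rel.security_justificationType = (
        security_VexJustificationType.vulnerableCodeNotPresent
    )
    rel.security_impactStatement = "The vulnerable code is not present in this build."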
5131 | |||
5132 | # Designates elements as products where the impact of a vulnerability is being | ||
5133 | # investigated. | ||
5134 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False) | ||
5135 | class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5136 | NODE_KIND = NodeKind.IRI | ||
5137 | ID_ALIAS = "spdxId" | ||
5138 | NAMED_INDIVIDUALS = { | ||
5139 | } | ||
5140 | |||
5141 | |||
5142 | # Refers to any object that stores content on a computer. | ||
5143 | @register("https://spdx.org/rdf/3.0.1/terms/Software/File", compact_type="software_File", abstract=False) | ||
5144 | class software_File(software_SoftwareArtifact): | ||
5145 | NODE_KIND = NodeKind.IRI | ||
5146 | ID_ALIAS = "spdxId" | ||
5147 | NAMED_INDIVIDUALS = { | ||
5148 | } | ||
5149 | |||
5150 | @classmethod | ||
5151 | def _register_props(cls): | ||
5152 | super()._register_props() | ||
5153 | # Provides information about the content type of an Element or a Property. | ||
5154 | cls._add_property( | ||
5155 | "contentType", | ||
5156 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
5157 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
5158 | compact="contentType", | ||
5159 | ) | ||
5160 | # Describes if a given file is a directory or non-directory kind of file. | ||
5161 | cls._add_property( | ||
5162 | "software_fileKind", | ||
5163 | EnumProp([ | ||
5164 | ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", "directory"), | ||
5165 | ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", "file"), | ||
5166 | ]), | ||
5167 | iri="https://spdx.org/rdf/3.0.1/terms/Software/fileKind", | ||
5168 | compact="software_fileKind", | ||
5169 | ) | ||
5170 | |||
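A short sketch of the constraints registered above: contentType must match the type/subtype media-type pattern, and fileKind takes one of two values. The software_FileKindType enum-class name is an assumption here, inferred from the naming pattern of the other generated enums in this file:

    f = software_File(_id="http://example.com/file-1")
    f.contentType = "text/plain"  # must match the "type/subtype" pattern
    f.software_fileKind = software_FileKindType.file  # assumed enum class name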
5171 | |||
5172 | # Refers to any unit of content that can be associated with a distribution of | ||
5173 | # software. | ||
5174 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Package", compact_type="software_Package", abstract=False) | ||
5175 | class software_Package(software_SoftwareArtifact): | ||
5176 | NODE_KIND = NodeKind.IRI | ||
5177 | ID_ALIAS = "spdxId" | ||
5178 | NAMED_INDIVIDUALS = { | ||
5179 | } | ||
5180 | |||
5181 | @classmethod | ||
5182 | def _register_props(cls): | ||
5183 | super()._register_props() | ||
5184 | # Identifies the download Uniform Resource Identifier for the package at the time | ||
5185 | # that the document was created. | ||
5186 | cls._add_property( | ||
5187 | "software_downloadLocation", | ||
5188 | AnyURIProp(), | ||
5189 | iri="https://spdx.org/rdf/3.0.1/terms/Software/downloadLocation", | ||
5190 | compact="software_downloadLocation", | ||
5191 | ) | ||
5192 | # A place for the SPDX document creator to record a website that serves as the | ||
5193 | # package's home page. | ||
5194 | cls._add_property( | ||
5195 | "software_homePage", | ||
5196 | AnyURIProp(), | ||
5197 | iri="https://spdx.org/rdf/3.0.1/terms/Software/homePage", | ||
5198 | compact="software_homePage", | ||
5199 | ) | ||
5200 | # Provides a place for the SPDX data creator to record the package URL string | ||
5201 | # (in accordance with the Package URL specification) for a software Package. | ||
5202 | cls._add_property( | ||
5203 | "software_packageUrl", | ||
5204 | AnyURIProp(), | ||
5205 | iri="https://spdx.org/rdf/3.0.1/terms/Software/packageUrl", | ||
5206 | compact="software_packageUrl", | ||
5207 | ) | ||
5208 | # Identify the version of a package. | ||
5209 | cls._add_property( | ||
5210 | "software_packageVersion", | ||
5211 | StringProp(), | ||
5212 | iri="https://spdx.org/rdf/3.0.1/terms/Software/packageVersion", | ||
5213 | compact="software_packageVersion", | ||
5214 | ) | ||
5215 | # Records any relevant background information or additional comments | ||
5216 | # about the origin of the package. | ||
5217 | cls._add_property( | ||
5218 | "software_sourceInfo", | ||
5219 | StringProp(), | ||
5220 | iri="https://spdx.org/rdf/3.0.1/terms/Software/sourceInfo", | ||
5221 | compact="software_sourceInfo", | ||
5222 | ) | ||
5223 | |||
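A minimal sketch combining the Package properties above (all values invented; oe/spdx30_tasks.py later in this diff fills the same fields from recipe metadata):

    pkg = software_Package(
        _id="http://example.com/pkg-1",
        name="foo",
        software_packageVersion="1.0",
        software_downloadLocation="https://example.com/foo-1.0.tar.gz",
        software_homePage="https://example.com/foo",
    )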
5224 | |||
5225 | # A collection of SPDX Elements describing a single package. | ||
5226 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Sbom", compact_type="software_Sbom", abstract=False) | ||
5227 | class software_Sbom(Bom): | ||
5228 | NODE_KIND = NodeKind.IRI | ||
5229 | ID_ALIAS = "spdxId" | ||
5230 | NAMED_INDIVIDUALS = { | ||
5231 | } | ||
5232 | |||
5233 | @classmethod | ||
5234 | def _register_props(cls): | ||
5235 | super()._register_props() | ||
5236 | # Provides information about the type of an SBOM. | ||
5237 | cls._add_property( | ||
5238 | "software_sbomType", | ||
5239 | ListProp(EnumProp([ | ||
5240 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", "analyzed"), | ||
5241 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", "build"), | ||
5242 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", "deployed"), | ||
5243 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", "design"), | ||
5244 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", "runtime"), | ||
5245 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", "source"), | ||
5246 | ])), | ||
5247 | iri="https://spdx.org/rdf/3.0.1/terms/Software/sbomType", | ||
5248 | compact="software_sbomType", | ||
5249 | ) | ||
5250 | |||
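A sketch of tagging an Sbom with its type; the software_SbomType enum-class name is an assumption based on the naming pattern of the other generated enums in this file:

    sbom = software_Sbom(_id="http://example.com/sbom-1")
    sbom.software_sbomType = [software_SbomType.build]  # assumed enum class name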
5251 | |||
5252 | # Describes a certain part of a file. | ||
5253 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Snippet", compact_type="software_Snippet", abstract=False) | ||
5254 | class software_Snippet(software_SoftwareArtifact): | ||
5255 | NODE_KIND = NodeKind.IRI | ||
5256 | ID_ALIAS = "spdxId" | ||
5257 | NAMED_INDIVIDUALS = { | ||
5258 | } | ||
5259 | |||
5260 | @classmethod | ||
5261 | def _register_props(cls): | ||
5262 | super()._register_props() | ||
5263 | # Defines the byte range in the original host file that the snippet information | ||
5264 | # applies to. | ||
5265 | cls._add_property( | ||
5266 | "software_byteRange", | ||
5267 | ObjectProp(PositiveIntegerRange, False), | ||
5268 | iri="https://spdx.org/rdf/3.0.1/terms/Software/byteRange", | ||
5269 | compact="software_byteRange", | ||
5270 | ) | ||
5271 | # Defines the line range in the original host file that the snippet information | ||
5272 | # applies to. | ||
5273 | cls._add_property( | ||
5274 | "software_lineRange", | ||
5275 | ObjectProp(PositiveIntegerRange, False), | ||
5276 | iri="https://spdx.org/rdf/3.0.1/terms/Software/lineRange", | ||
5277 | compact="software_lineRange", | ||
5278 | ) | ||
5279 | # Defines the original host file that the snippet information applies to. | ||
5280 | cls._add_property( | ||
5281 | "software_snippetFromFile", | ||
5282 | ObjectProp(software_File, True), | ||
5283 | iri="https://spdx.org/rdf/3.0.1/terms/Software/snippetFromFile", | ||
5284 | min_count=1, | ||
5285 | compact="software_snippetFromFile", | ||
5286 | ) | ||
5287 | |||
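A sketch of a snippet anchored to its host file. The byte range is a Core PositiveIntegerRange; the beginIntegerRange/endIntegerRange keyword names are taken from the SPDX 3.0.1 Core vocabulary and assumed to be the generated constructor arguments:

    host = software_File(_id="http://example.com/file-1")
    snip = software_Snippet(_id="http://example.com/snippet-1")
    snip.software_snippetFromFile = host  # mandatory (min_count=1)
    snip.software_byteRange = PositiveIntegerRange(
        beginIntegerRange=1,
        endIntegerRange=4096,
    )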
5288 | |||
5289 | # Specifies an AI package and its associated information. | ||
5290 | @register("https://spdx.org/rdf/3.0.1/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False) | ||
5291 | class ai_AIPackage(software_Package): | ||
5292 | NODE_KIND = NodeKind.IRI | ||
5293 | ID_ALIAS = "spdxId" | ||
5294 | NAMED_INDIVIDUALS = { | ||
5295 | } | ||
5296 | |||
5297 | @classmethod | ||
5298 | def _register_props(cls): | ||
5299 | super()._register_props() | ||
5300 | # Indicates whether the system can perform a decision or action without human | ||
5301 | # involvement or guidance. | ||
5302 | cls._add_property( | ||
5303 | "ai_autonomyType", | ||
5304 | EnumProp([ | ||
5305 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
5306 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
5307 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
5308 | ]), | ||
5309 | iri="https://spdx.org/rdf/3.0.1/terms/AI/autonomyType", | ||
5310 | compact="ai_autonomyType", | ||
5311 | ) | ||
5312 | # Captures the domain in which the AI package can be used. | ||
5313 | cls._add_property( | ||
5314 | "ai_domain", | ||
5315 | ListProp(StringProp()), | ||
5316 | iri="https://spdx.org/rdf/3.0.1/terms/AI/domain", | ||
5317 | compact="ai_domain", | ||
5318 | ) | ||
5319 | # Indicates the amount of energy consumption incurred by an AI model. | ||
5320 | cls._add_property( | ||
5321 | "ai_energyConsumption", | ||
5322 | ObjectProp(ai_EnergyConsumption, False), | ||
5323 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyConsumption", | ||
5324 | compact="ai_energyConsumption", | ||
5325 | ) | ||
5326 | # Records a hyperparameter used to build the AI model contained in the AI | ||
5327 | # package. | ||
5328 | cls._add_property( | ||
5329 | "ai_hyperparameter", | ||
5330 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5331 | iri="https://spdx.org/rdf/3.0.1/terms/AI/hyperparameter", | ||
5332 | compact="ai_hyperparameter", | ||
5333 | ) | ||
5334 | # Provides relevant information about the AI software, not including the model | ||
5335 | # description. | ||
5336 | cls._add_property( | ||
5337 | "ai_informationAboutApplication", | ||
5338 | StringProp(), | ||
5339 | iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutApplication", | ||
5340 | compact="ai_informationAboutApplication", | ||
5341 | ) | ||
5342 | # Describes relevant information about different steps of the training process. | ||
5343 | cls._add_property( | ||
5344 | "ai_informationAboutTraining", | ||
5345 | StringProp(), | ||
5346 | iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutTraining", | ||
5347 | compact="ai_informationAboutTraining", | ||
5348 | ) | ||
5349 | # Captures a limitation of the AI software. | ||
5350 | cls._add_property( | ||
5351 | "ai_limitation", | ||
5352 | StringProp(), | ||
5353 | iri="https://spdx.org/rdf/3.0.1/terms/AI/limitation", | ||
5354 | compact="ai_limitation", | ||
5355 | ) | ||
5356 | # Records the measurement of prediction quality of the AI model. | ||
5357 | cls._add_property( | ||
5358 | "ai_metric", | ||
5359 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5360 | iri="https://spdx.org/rdf/3.0.1/terms/AI/metric", | ||
5361 | compact="ai_metric", | ||
5362 | ) | ||
5363 | # Captures the threshold that was used for computation of a metric described in | ||
5364 | # the metric field. | ||
5365 | cls._add_property( | ||
5366 | "ai_metricDecisionThreshold", | ||
5367 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5368 | iri="https://spdx.org/rdf/3.0.1/terms/AI/metricDecisionThreshold", | ||
5369 | compact="ai_metricDecisionThreshold", | ||
5370 | ) | ||
5371 | # Describes all the preprocessing steps applied to the training data before the | ||
5372 | # model training. | ||
5373 | cls._add_property( | ||
5374 | "ai_modelDataPreprocessing", | ||
5375 | ListProp(StringProp()), | ||
5376 | iri="https://spdx.org/rdf/3.0.1/terms/AI/modelDataPreprocessing", | ||
5377 | compact="ai_modelDataPreprocessing", | ||
5378 | ) | ||
5379 | # Describes methods that can be used to explain the results from the AI model. | ||
5380 | cls._add_property( | ||
5381 | "ai_modelExplainability", | ||
5382 | ListProp(StringProp()), | ||
5383 | iri="https://spdx.org/rdf/3.0.1/terms/AI/modelExplainability", | ||
5384 | compact="ai_modelExplainability", | ||
5385 | ) | ||
5386 | # Records the results of general safety risk assessment of the AI system. | ||
5387 | cls._add_property( | ||
5388 | "ai_safetyRiskAssessment", | ||
5389 | EnumProp([ | ||
5390 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", "high"), | ||
5391 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", "low"), | ||
5392 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", "medium"), | ||
5393 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", "serious"), | ||
5394 | ]), | ||
5395 | iri="https://spdx.org/rdf/3.0.1/terms/AI/safetyRiskAssessment", | ||
5396 | compact="ai_safetyRiskAssessment", | ||
5397 | ) | ||
5398 | # Captures a standard that is being complied with. | ||
5399 | cls._add_property( | ||
5400 | "ai_standardCompliance", | ||
5401 | ListProp(StringProp()), | ||
5402 | iri="https://spdx.org/rdf/3.0.1/terms/AI/standardCompliance", | ||
5403 | compact="ai_standardCompliance", | ||
5404 | ) | ||
5405 | # Records the type of the model used in the AI software. | ||
5406 | cls._add_property( | ||
5407 | "ai_typeOfModel", | ||
5408 | ListProp(StringProp()), | ||
5409 | iri="https://spdx.org/rdf/3.0.1/terms/AI/typeOfModel", | ||
5410 | compact="ai_typeOfModel", | ||
5411 | ) | ||
5412 | # Records if sensitive personal information is used during model training or | ||
5413 | # could be used during the inference. | ||
5414 | cls._add_property( | ||
5415 | "ai_useSensitivePersonalInformation", | ||
5416 | EnumProp([ | ||
5417 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
5418 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
5419 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
5420 | ]), | ||
5421 | iri="https://spdx.org/rdf/3.0.1/terms/AI/useSensitivePersonalInformation", | ||
5422 | compact="ai_useSensitivePersonalInformation", | ||
5423 | ) | ||
5424 | |||
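The hyperparameter, metric and metricDecisionThreshold properties above all carry key/value pairs as Core DictionaryEntry objects. A hypothetical sketch; the key/value keyword names follow the SPDX 3.0.1 Core vocabulary and are assumed to be the generated constructor arguments:

    ai_pkg = ai_AIPackage(_id="http://example.com/ai-pkg-1")
    ai_pkg.ai_hyperparameter = [
        DictionaryEntry(key="learning_rate", value="0.001"),
        DictionaryEntry(key="epochs", value="10"),
    ]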
5425 | |||
5426 | # Specifies a data package and its associated information. | ||
5427 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False) | ||
5428 | class dataset_DatasetPackage(software_Package): | ||
5429 | NODE_KIND = NodeKind.IRI | ||
5430 | ID_ALIAS = "spdxId" | ||
5431 | NAMED_INDIVIDUALS = { | ||
5432 | } | ||
5433 | |||
5434 | @classmethod | ||
5435 | def _register_props(cls): | ||
5436 | super()._register_props() | ||
5437 | # Describes the anonymization methods used. | ||
5438 | cls._add_property( | ||
5439 | "dataset_anonymizationMethodUsed", | ||
5440 | ListProp(StringProp()), | ||
5441 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/anonymizationMethodUsed", | ||
5442 | compact="dataset_anonymizationMethodUsed", | ||
5443 | ) | ||
5444 | # Describes the confidentiality level of the data points contained in the dataset. | ||
5445 | cls._add_property( | ||
5446 | "dataset_confidentialityLevel", | ||
5447 | EnumProp([ | ||
5448 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", "amber"), | ||
5449 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", "clear"), | ||
5450 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", "green"), | ||
5451 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", "red"), | ||
5452 | ]), | ||
5453 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/confidentialityLevel", | ||
5454 | compact="dataset_confidentialityLevel", | ||
5455 | ) | ||
5456 | # Describes how the dataset was collected. | ||
5457 | cls._add_property( | ||
5458 | "dataset_dataCollectionProcess", | ||
5459 | StringProp(), | ||
5460 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataCollectionProcess", | ||
5461 | compact="dataset_dataCollectionProcess", | ||
5462 | ) | ||
5463 | # Describes the preprocessing steps that were applied to the raw data to create the given dataset. | ||
5464 | cls._add_property( | ||
5465 | "dataset_dataPreprocessing", | ||
5466 | ListProp(StringProp()), | ||
5467 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataPreprocessing", | ||
5468 | compact="dataset_dataPreprocessing", | ||
5469 | ) | ||
5470 | # The field describes the availability of a dataset. | ||
5471 | cls._add_property( | ||
5472 | "dataset_datasetAvailability", | ||
5473 | EnumProp([ | ||
5474 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"), | ||
5475 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"), | ||
5476 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", "query"), | ||
5477 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", "registration"), | ||
5478 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"), | ||
5479 | ]), | ||
5480 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetAvailability", | ||
5481 | compact="dataset_datasetAvailability", | ||
5482 | ) | ||
5483 | # Describes potentially noisy elements of the dataset. | ||
5484 | cls._add_property( | ||
5485 | "dataset_datasetNoise", | ||
5486 | StringProp(), | ||
5487 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetNoise", | ||
5488 | compact="dataset_datasetNoise", | ||
5489 | ) | ||
5490 | # Captures the size of the dataset. | ||
5491 | cls._add_property( | ||
5492 | "dataset_datasetSize", | ||
5493 | NonNegativeIntegerProp(), | ||
5494 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetSize", | ||
5495 | compact="dataset_datasetSize", | ||
5496 | ) | ||
5497 | # Describes the type of the given dataset. | ||
5498 | cls._add_property( | ||
5499 | "dataset_datasetType", | ||
5500 | ListProp(EnumProp([ | ||
5501 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", "audio"), | ||
5502 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", "categorical"), | ||
5503 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", "graph"), | ||
5504 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", "image"), | ||
5505 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", "noAssertion"), | ||
5506 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", "numeric"), | ||
5507 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", "other"), | ||
5508 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", "sensor"), | ||
5509 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", "structured"), | ||
5510 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", "syntactic"), | ||
5511 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", "text"), | ||
5512 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", "timeseries"), | ||
5513 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", "timestamp"), | ||
5514 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", "video"), | ||
5515 | ])), | ||
5516 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetType", | ||
5517 | min_count=1, | ||
5518 | compact="dataset_datasetType", | ||
5519 | ) | ||
5520 | # Describes a mechanism to update the dataset. | ||
5521 | cls._add_property( | ||
5522 | "dataset_datasetUpdateMechanism", | ||
5523 | StringProp(), | ||
5524 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetUpdateMechanism", | ||
5525 | compact="dataset_datasetUpdateMechanism", | ||
5526 | ) | ||
5527 | # Describes if any sensitive personal information is present in the dataset. | ||
5528 | cls._add_property( | ||
5529 | "dataset_hasSensitivePersonalInformation", | ||
5530 | EnumProp([ | ||
5531 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
5532 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
5533 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
5534 | ]), | ||
5535 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/hasSensitivePersonalInformation", | ||
5536 | compact="dataset_hasSensitivePersonalInformation", | ||
5537 | ) | ||
5538 | # Describes what the given dataset should be used for. | ||
5539 | cls._add_property( | ||
5540 | "dataset_intendedUse", | ||
5541 | StringProp(), | ||
5542 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/intendedUse", | ||
5543 | compact="dataset_intendedUse", | ||
5544 | ) | ||
5545 | # Records the biases that the dataset is known to encompass. | ||
5546 | cls._add_property( | ||
5547 | "dataset_knownBias", | ||
5548 | ListProp(StringProp()), | ||
5549 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/knownBias", | ||
5550 | compact="dataset_knownBias", | ||
5551 | ) | ||
5552 | # Describes a sensor used for collecting the data. | ||
5553 | cls._add_property( | ||
5554 | "dataset_sensor", | ||
5555 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5556 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/sensor", | ||
5557 | compact="dataset_sensor", | ||
5558 | ) | ||
5559 | |||
5560 | |||
5561 | """Format Guard""" | ||
5562 | # fmt: on | ||
5563 | |||
5564 | |||
5565 | def main(): | ||
5566 | import argparse | ||
5567 | from pathlib import Path | ||
5568 | |||
5569 | parser = argparse.ArgumentParser(description="Python SHACL model test") | ||
5570 | parser.add_argument("infile", type=Path, help="Input file") | ||
5571 | parser.add_argument("--print", action="store_true", help="Print object tree") | ||
5572 | parser.add_argument("--outfile", type=Path, help="Output file") | ||
5573 | |||
5574 | args = parser.parse_args() | ||
5575 | |||
5576 | objectset = SHACLObjectSet() | ||
5577 | with args.infile.open("r") as f: | ||
5578 | d = JSONLDDeserializer() | ||
5579 | d.read(f, objectset) | ||
5580 | |||
5581 | if args.print: | ||
5582 | print_tree(objectset.objects) | ||
5583 | |||
5584 | if args.outfile: | ||
5585 | with args.outfile.open("wb") as f: | ||
5586 | s = JSONLDSerializer() | ||
5587 | s.write(objectset, f) | ||
5588 | |||
5589 | return 0 | ||
5590 | |||
5591 | |||
5592 | if __name__ == "__main__": | ||
5593 | sys.exit(main()) | ||
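For reference, the round trip that main() performs can also be done programmatically with the classes this module defines (file names below are made up):

    # Deserialize a JSON-LD document into an object set, then write it back out.
    objectset = SHACLObjectSet()
    with open("input.spdx.json") as f:
        JSONLDDeserializer().read(f, objectset)

    print_tree(objectset.objects)

    with open("output.spdx.json", "wb") as f:
        JSONLDSerializer().write(objectset, f)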
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py new file mode 100644 index 0000000000..5d9f3168d9 --- /dev/null +++ b/meta/lib/oe/spdx30_tasks.py | |||
@@ -0,0 +1,1368 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import json | ||
8 | import oe.cve_check | ||
9 | import oe.packagedata | ||
10 | import oe.patch | ||
11 | import oe.sbom30 | ||
12 | import oe.spdx30 | ||
13 | import oe.spdx_common | ||
14 | import oe.sdk | ||
15 | import os | ||
16 | |||
17 | from contextlib import contextmanager | ||
18 | from datetime import datetime, timezone | ||
19 | from pathlib import Path | ||
20 | |||
21 | |||
22 | def walk_error(err): | ||
23 | bb.error(f"ERROR walking {err.filename}: {err}") | ||
24 | |||
25 | |||
26 | def set_timestamp_now(d, o, prop): | ||
27 | if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1": | ||
28 | setattr(o, prop, datetime.now(timezone.utc)) | ||
29 | else: | ||
30 | # Doing this helps to validate that the property actually exists, and | ||
31 | # also that it is not mandatory | ||
32 | delattr(o, prop) | ||
33 | |||
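A sketch of the two behaviours, assuming a datastore d and an element pkg that has the property (the "builtTime" name matches the call sites in create_spdx() below):

    # Reproducible configuration: the timestamp property is removed entirely
    d.setVar("SPDX_INCLUDE_TIMESTAMPS", "0")
    set_timestamp_now(d, pkg, "builtTime")  # deletes pkg.builtTime

    # Otherwise the current UTC time is recorded
    d.setVar("SPDX_INCLUDE_TIMESTAMPS", "1")
    set_timestamp_now(d, pkg, "builtTime")  # pkg.builtTime = now (UTC)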
34 | |||
35 | def add_license_expression(d, objset, license_expression, license_data): | ||
36 | simple_license_text = {} | ||
37 | license_text_map = {} | ||
38 | license_ref_idx = 0 | ||
39 | |||
40 | def add_license_text(name): | ||
41 | nonlocal objset | ||
42 | nonlocal simple_license_text | ||
43 | |||
44 | if name in simple_license_text: | ||
45 | return simple_license_text[name] | ||
46 | |||
47 | lic = objset.find_filter( | ||
48 | oe.spdx30.simplelicensing_SimpleLicensingText, | ||
49 | name=name, | ||
50 | ) | ||
51 | |||
52 | if lic is not None: | ||
53 | simple_license_text[name] = lic | ||
54 | return lic | ||
55 | |||
56 | lic = objset.add( | ||
57 | oe.spdx30.simplelicensing_SimpleLicensingText( | ||
58 | _id=objset.new_spdxid("license-text", name), | ||
59 | creationInfo=objset.doc.creationInfo, | ||
60 | name=name, | ||
61 | ) | ||
62 | ) | ||
63 | objset.set_element_alias(lic) | ||
64 | simple_license_text[name] = lic | ||
65 | |||
66 | if name == "PD": | ||
67 | lic.simplelicensing_licenseText = "Software released to the public domain" | ||
68 | return lic | ||
69 | |||
70 | # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH | ||
71 | for directory in [d.getVar("COMMON_LICENSE_DIR")] + ( | ||
72 | d.getVar("LICENSE_PATH") or "" | ||
73 | ).split(): | ||
74 | try: | ||
75 | with (Path(directory) / name).open(errors="replace") as f: | ||
76 | lic.simplelicensing_licenseText = f.read() | ||
77 | return lic | ||
78 | |||
79 | except FileNotFoundError: | ||
80 | pass | ||
81 | |||
82 | # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set | ||
83 | filename = d.getVarFlag("NO_GENERIC_LICENSE", name) | ||
84 | if filename: | ||
85 | filename = d.expand("${S}/" + filename) | ||
86 | with open(filename, errors="replace") as f: | ||
87 | lic.simplelicensing_licenseText = f.read() | ||
88 | return lic | ||
89 | else: | ||
90 | bb.fatal("Cannot find any text for license %s" % name) | ||
91 | |||
92 | def convert(l): | ||
93 | nonlocal license_text_map | ||
94 | nonlocal license_ref_idx | ||
95 | |||
96 | if l == "(" or l == ")": | ||
97 | return l | ||
98 | |||
99 | if l == "&": | ||
100 | return "AND" | ||
101 | |||
102 | if l == "|": | ||
103 | return "OR" | ||
104 | |||
105 | if l == "CLOSED": | ||
106 | return "NONE" | ||
107 | |||
108 | spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l | ||
109 | if spdx_license in license_data["licenses"]: | ||
110 | return spdx_license | ||
111 | |||
112 | spdx_license = "LicenseRef-" + l | ||
113 | if spdx_license not in license_text_map: | ||
114 | license_text_map[spdx_license] = oe.sbom30.get_element_link_id( | ||
115 | add_license_text(l) | ||
116 | ) | ||
117 | |||
118 | return spdx_license | ||
119 | |||
120 | lic_split = ( | ||
121 | license_expression.replace("(", " ( ") | ||
122 | .replace(")", " ) ") | ||
123 | .replace("|", " | ") | ||
124 | .replace("&", " & ") | ||
125 | .split() | ||
126 | ) | ||
127 | spdx_license_expression = " ".join(convert(l) for l in lic_split) | ||
128 | |||
129 | o = objset.new_license_expression( | ||
130 | spdx_license_expression, license_data, license_text_map | ||
131 | ) | ||
132 | objset.set_element_alias(o) | ||
133 | return o | ||
134 | |||
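The tokenizer above is deliberately simple. As a standalone illustration of the operator mapping that convert() applies (names that are neither SPDX identifiers nor covered by SPDXLICENSEMAP would additionally be turned into LicenseRef- entries with attached license text):

    expr = "GPL-2.0-only & (MIT | CLOSED)"
    tokens = (
        expr.replace("(", " ( ")
        .replace(")", " ) ")
        .replace("|", " | ")
        .replace("&", " & ")
        .split()
    )
    mapping = {"&": "AND", "|": "OR", "CLOSED": "NONE"}
    print(" ".join(mapping.get(t, t) for t in tokens))
    # -> GPL-2.0-only AND ( MIT OR NONE )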
135 | |||
136 | def add_package_files( | ||
137 | d, | ||
138 | objset, | ||
139 | topdir, | ||
140 | get_spdxid, | ||
141 | get_purposes, | ||
142 | license_data=None, | ||
143 | *, | ||
144 | archive=None, | ||
145 | ignore_dirs=[], | ||
146 | ignore_top_level_dirs=[], | ||
147 | ): | ||
148 | source_date_epoch = d.getVar("SOURCE_DATE_EPOCH") | ||
149 | if source_date_epoch: | ||
150 | source_date_epoch = int(source_date_epoch) | ||
151 | |||
152 | spdx_files = set() | ||
153 | |||
154 | file_counter = 1 | ||
155 | if not os.path.exists(topdir): | ||
156 | bb.note(f"Skip {topdir}") | ||
157 | return spdx_files | ||
158 | |||
159 | check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1" | ||
160 | if check_compiled_sources: | ||
161 | compiled_sources, types = oe.spdx_common.get_compiled_sources(d) | ||
162 | bb.debug(1, f"Total compiled files: {len(compiled_sources)}") | ||
163 | |||
164 | for subdir, dirs, files in os.walk(topdir, onerror=walk_error): | ||
165 | dirs[:] = [d for d in dirs if d not in ignore_dirs] | ||
166 | if subdir == str(topdir): | ||
167 | dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs] | ||
168 | |||
169 | dirs.sort() | ||
170 | files.sort() | ||
171 | for file in files: | ||
172 | filepath = Path(subdir) / file | ||
173 | if filepath.is_symlink() or not filepath.is_file(): | ||
174 | continue | ||
175 | |||
176 | filename = str(filepath.relative_to(topdir)) | ||
177 | file_purposes = get_purposes(filepath) | ||
178 | |||
179 | # Check if file is compiled | ||
180 | if check_compiled_sources: | ||
181 | if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types): | ||
182 | continue | ||
183 | |||
184 | spdx_file = objset.new_file( | ||
185 | get_spdxid(file_counter), | ||
186 | filename, | ||
187 | filepath, | ||
188 | purposes=file_purposes, | ||
189 | ) | ||
190 | spdx_files.add(spdx_file) | ||
191 | |||
192 | if ( | ||
193 | oe.spdx30.software_SoftwarePurpose.source in file_purposes | ||
194 | and license_data is not None | ||
195 | ): | ||
196 | objset.scan_declared_licenses(spdx_file, filepath, license_data) | ||
197 | |||
198 | if archive is not None: | ||
199 | with filepath.open("rb") as f: | ||
200 | info = archive.gettarinfo(fileobj=f) | ||
201 | info.name = filename | ||
202 | info.uid = 0 | ||
203 | info.gid = 0 | ||
204 | info.uname = "root" | ||
205 | info.gname = "root" | ||
206 | |||
207 | if source_date_epoch is not None and info.mtime > source_date_epoch: | ||
208 | info.mtime = source_date_epoch | ||
209 | |||
210 | archive.addfile(info, f) | ||
211 | |||
212 | file_counter += 1 | ||
213 | |||
214 | bb.debug(1, "Added %d files to %s" % (len(spdx_files), objset.doc._id)) | ||
215 | |||
216 | return spdx_files | ||
217 | |||
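The two callables decouple SPDX ID allocation and purpose assignment from the directory walk. A sketch of the calling contract, assuming a datastore d, an oe.sbom30 object set objset and loaded license_data; this mirrors the lambdas that create_spdx() passes later in this file:

    files = add_package_files(
        d,
        objset,
        topdir,
        lambda counter: objset.new_spdxid("sourcefile", str(counter)),
        lambda filepath: [oe.spdx30.software_SoftwarePurpose.source],
        license_data,
        ignore_dirs=[".git"],
    )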
218 | |||
219 | def get_package_sources_from_debug( | ||
220 | d, package, package_files, sources, source_hash_cache | ||
221 | ): | ||
222 | def file_path_match(file_path, pkg_file): | ||
223 | if file_path.lstrip("/") == pkg_file.name.lstrip("/"): | ||
224 | return True | ||
225 | |||
226 | for e in pkg_file.extension: | ||
227 | if isinstance(e, oe.sbom30.OEFileNameAliasExtension): | ||
228 | for a in e.aliases: | ||
229 | if file_path.lstrip("/") == a.lstrip("/"): | ||
230 | return True | ||
231 | |||
232 | return False | ||
233 | |||
234 | debug_search_paths = [ | ||
235 | Path(d.getVar("SPDXWORK")), | ||
236 | Path(d.getVar("PKGD")), | ||
237 | Path(d.getVar("STAGING_DIR_TARGET")), | ||
238 | Path(d.getVar("STAGING_DIR_NATIVE")), | ||
239 | Path(d.getVar("STAGING_KERNEL_DIR")), | ||
240 | ] | ||
241 | |||
242 | pkg_data = oe.packagedata.read_subpkgdata_extended(package, d) | ||
243 | |||
244 | if pkg_data is None: | ||
245 | return | ||
246 | |||
247 | dep_source_files = set() | ||
248 | |||
249 | for file_path, file_data in pkg_data["files_info"].items(): | ||
250 | if "debugsrc" not in file_data: | ||
251 | continue | ||
252 | |||
253 | if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files): | ||
254 | bb.fatal( | ||
255 | "No package file found for %s in %s; SPDX found: %s" | ||
256 | % (str(file_path), package, " ".join(p.name for p in package_files)) | ||
257 | ) | ||
258 | continue | ||
259 | |||
260 | for debugsrc in file_data["debugsrc"]: | ||
261 | for search in debug_search_paths: | ||
262 | if debugsrc.startswith("/usr/src/kernel"): | ||
263 | debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "") | ||
264 | else: | ||
265 | debugsrc_path = search / debugsrc.lstrip("/") | ||
266 | |||
267 | if debugsrc_path in source_hash_cache: | ||
268 | file_sha256 = source_hash_cache[debugsrc_path] | ||
269 | if file_sha256 is None: | ||
270 | continue | ||
271 | else: | ||
272 | # We can only hash files; skip directories, links, etc. | ||
273 | if not debugsrc_path.is_file(): | ||
274 | source_hash_cache[debugsrc_path] = None | ||
275 | continue | ||
276 | |||
277 | file_sha256 = bb.utils.sha256_file(debugsrc_path) | ||
278 | source_hash_cache[debugsrc_path] = file_sha256 | ||
279 | |||
280 | if file_sha256 in sources: | ||
281 | source_file = sources[file_sha256] | ||
282 | dep_source_files.add(source_file) | ||
283 | else: | ||
284 | bb.debug( | ||
285 | 1, | ||
286 | "Debug source %s with SHA256 %s not found in any dependency" | ||
287 | % (str(debugsrc_path), file_sha256), | ||
288 | ) | ||
289 | break | ||
290 | else: | ||
291 | bb.debug(1, "Debug source %s not found" % debugsrc) | ||
292 | |||
293 | return dep_source_files | ||
294 | |||
295 | |||
296 | def collect_dep_objsets(d, build): | ||
297 | deps = oe.spdx_common.get_spdx_deps(d) | ||
298 | |||
299 | dep_objsets = [] | ||
300 | dep_builds = set() | ||
301 | |||
302 | dep_build_spdxids = set() | ||
303 | for dep in deps: | ||
304 | bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn)) | ||
305 | dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld( | ||
306 | d, "recipes", "recipe-" + dep.pn, oe.spdx30.build_Build | ||
307 | ) | ||
308 | # If the dependency is part of the taskhash, return it to be linked | ||
309 | # against. Otherwise, it cannot be linked against because this recipe | ||
310 | # will not be rebuilt if the dependency changes | ||
311 | if dep.in_taskhash: | ||
312 | dep_objsets.append(dep_objset) | ||
313 | |||
314 | # The build _can_ be linked against (by alias) | ||
315 | dep_builds.add(dep_build) | ||
316 | |||
317 | return dep_objsets, dep_builds | ||
318 | |||
319 | |||
320 | def index_sources_by_hash(sources, dest): | ||
321 | for s in sources: | ||
322 | if not isinstance(s, oe.spdx30.software_File): | ||
323 | continue | ||
324 | |||
325 | if s.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source: | ||
326 | continue | ||
327 | |||
328 | for v in s.verifiedUsing: | ||
329 | if v.algorithm == oe.spdx30.HashAlgorithm.sha256: | ||
330 | if v.hashValue not in dest: | ||
331 | dest[v.hashValue] = s | ||
332 | break | ||
333 | else: | ||
334 | bb.fatal(f"No SHA256 found for {s.name}") | ||
335 | |||
336 | |||
337 | def collect_dep_sources(dep_objsets, dest): | ||
338 | for objset in dep_objsets: | ||
339 | # Don't collect sources from native recipes, as they also | ||
340 | # match non-native sources. | ||
341 | if objset.is_native(): | ||
342 | continue | ||
343 | |||
344 | bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name)) | ||
345 | |||
346 | dep_build = objset.find_root(oe.spdx30.build_Build) | ||
347 | if not dep_build: | ||
348 | bb.fatal("Unable to find a build") | ||
349 | |||
350 | for e in objset.foreach_type(oe.spdx30.Relationship): | ||
351 | if dep_build is not e.from_: | ||
352 | continue | ||
353 | |||
354 | if e.relationshipType != oe.spdx30.RelationshipType.hasInput: | ||
355 | continue | ||
356 | |||
357 | index_sources_by_hash(e.to, dest) | ||
358 | |||
359 | |||
360 | def add_download_files(d, objset): | ||
361 | inputs = set() | ||
362 | |||
363 | urls = d.getVar("SRC_URI").split() | ||
364 | fetch = bb.fetch2.Fetch(urls, d) | ||
365 | |||
366 | for download_idx, src_uri in enumerate(urls): | ||
367 | fd = fetch.ud[src_uri] | ||
368 | |||
369 | file_name = os.path.basename(fetch.localpath(src_uri)) | ||
370 | if oe.patch.patch_path(src_uri, fetch, "", expand=False): | ||
371 | primary_purpose = oe.spdx30.software_SoftwarePurpose.patch | ||
372 | else: | ||
373 | primary_purpose = oe.spdx30.software_SoftwarePurpose.source | ||
374 | |||
375 | if fd.type == "file": | ||
376 | if os.path.isdir(fd.localpath): | ||
377 | walk_idx = 1 | ||
378 | for root, dirs, files in os.walk(fd.localpath, onerror=walk_error): | ||
379 | dirs.sort() | ||
380 | files.sort() | ||
381 | for f in files: | ||
382 | f_path = os.path.join(root, f) | ||
383 | if os.path.islink(f_path): | ||
384 | # TODO: SPDX doesn't support symlinks yet | ||
385 | continue | ||
386 | |||
387 | file = objset.new_file( | ||
388 | objset.new_spdxid( | ||
389 | "source", str(download_idx + 1), str(walk_idx) | ||
390 | ), | ||
391 | os.path.join( | ||
392 | file_name, os.path.relpath(f_path, fd.localpath) | ||
393 | ), | ||
394 | f_path, | ||
395 | purposes=[primary_purpose], | ||
396 | ) | ||
397 | |||
398 | inputs.add(file) | ||
399 | walk_idx += 1 | ||
400 | |||
401 | else: | ||
402 | file = objset.new_file( | ||
403 | objset.new_spdxid("source", str(download_idx + 1)), | ||
404 | file_name, | ||
405 | fd.localpath, | ||
406 | purposes=[primary_purpose], | ||
407 | ) | ||
408 | inputs.add(file) | ||
409 | |||
410 | else: | ||
411 | dl = objset.add( | ||
412 | oe.spdx30.software_Package( | ||
413 | _id=objset.new_spdxid("source", str(download_idx + 1)), | ||
414 | creationInfo=objset.doc.creationInfo, | ||
415 | name=file_name, | ||
416 | software_primaryPurpose=primary_purpose, | ||
417 | software_downloadLocation=oe.spdx_common.fetch_data_to_uri( | ||
418 | fd, fd.name | ||
419 | ), | ||
420 | ) | ||
421 | ) | ||
422 | |||
423 | if fd.method.supports_checksum(fd): | ||
424 | # TODO: Need something better than hard-coding this | ||
425 | for checksum_id in ["sha256", "sha1"]: | ||
426 | expected_checksum = getattr( | ||
427 | fd, "%s_expected" % checksum_id, None | ||
428 | ) | ||
429 | if expected_checksum is None: | ||
430 | continue | ||
431 | |||
432 | dl.verifiedUsing.append( | ||
433 | oe.spdx30.Hash( | ||
434 | algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id), | ||
435 | hashValue=expected_checksum, | ||
436 | ) | ||
437 | ) | ||
438 | |||
439 | inputs.add(dl) | ||
440 | |||
441 | return inputs | ||
442 | |||
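Informally: file:// entries become software_File elements (patches with the patch purpose), while remote URIs become a software_Package whose downloadLocation records the fetch URI. A hypothetical sketch of the latter shape; the real code derives every value from the fetcher:

    dl = oe.spdx30.software_Package(
        _id="http://example.com/source-1",
        name="foo-1.0.tar.gz",
        software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.source,
        software_downloadLocation="https://example.com/foo-1.0.tar.gz",
    )
    dl.verifiedUsing.append(
        oe.spdx30.Hash(
            algorithm=oe.spdx30.HashAlgorithm.sha256,
            hashValue="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
        )
    )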
443 | |||
444 | def set_purposes(d, element, *var_names, force_purposes=[]): | ||
445 | purposes = force_purposes[:] | ||
446 | |||
447 | for var_name in var_names: | ||
448 | val = d.getVar(var_name) | ||
449 | if val: | ||
450 | purposes.extend(val.split()) | ||
451 | break | ||
452 | |||
453 | if not purposes: | ||
454 | bb.warn("No SPDX purposes found in %s" % " ".join(var_names)) | ||
455 | return | ||
456 | |||
457 | element.software_primaryPurpose = getattr( | ||
458 | oe.spdx30.software_SoftwarePurpose, purposes[0] | ||
459 | ) | ||
460 | element.software_additionalPurpose = [ | ||
461 | getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:] | ||
462 | ] | ||
463 | |||
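A sketch of the precedence: force_purposes come first, then the first variable in var_names that has a value contributes the rest, and the first entry overall becomes the primary purpose (package name and values below are hypothetical):

    d.setVar("SPDX_PACKAGE_ADDITIONAL_PURPOSE:mypkg", "library source")
    set_purposes(
        d,
        spdx_package,
        "SPDX_PACKAGE_ADDITIONAL_PURPOSE:mypkg",
        "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
        force_purposes=["install"],
    )
    # -> software_primaryPurpose = install
    # -> software_additionalPurpose = [library, source]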
464 | |||
465 | def create_spdx(d): | ||
466 | def set_var_field(var, obj, name, package=None): | ||
467 | val = None | ||
468 | if package: | ||
469 | val = d.getVar("%s:%s" % (var, package)) | ||
470 | |||
471 | if not val: | ||
472 | val = d.getVar(var) | ||
473 | |||
474 | if val: | ||
475 | setattr(obj, name, val) | ||
476 | |||
477 | license_data = oe.spdx_common.load_spdx_license_data(d) | ||
478 | |||
479 | deploydir = Path(d.getVar("SPDXDEPLOY")) | ||
480 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
481 | spdx_workdir = Path(d.getVar("SPDXWORK")) | ||
482 | include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1" | ||
483 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
484 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class( | ||
485 | "cross", d | ||
486 | ) | ||
487 | include_vex = d.getVar("SPDX_INCLUDE_VEX") | ||
488 | if include_vex not in ("none", "current", "all"): | ||
489 | bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'") | ||
490 | |||
491 | build_objset = oe.sbom30.ObjectSet.new_objset(d, "recipe-" + d.getVar("PN")) | ||
492 | |||
493 | build = build_objset.new_task_build("recipe", "recipe") | ||
494 | build_objset.set_element_alias(build) | ||
495 | |||
496 | build_objset.doc.rootElement.append(build) | ||
497 | |||
498 | build_objset.set_is_native(is_native) | ||
499 | |||
500 | for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split(): | ||
501 | new_annotation( | ||
502 | d, | ||
503 | build_objset, | ||
504 | build, | ||
505 | "%s=%s" % (var, d.getVar(var)), | ||
506 | oe.spdx30.AnnotationType.other, | ||
507 | ) | ||
508 | |||
509 | build_inputs = set() | ||
510 | |||
511 | # Add CVEs | ||
512 | cve_by_status = {} | ||
513 | if include_vex != "none": | ||
514 | patched_cves = oe.cve_check.get_patched_cves(d) | ||
515 | for cve, patched_cve in patched_cves.items(): | ||
516 | decoded_status = { | ||
517 | "mapping": patched_cve["abbrev-status"], | ||
518 | "detail": patched_cve["status"], | ||
519 | "description": patched_cve.get("justification", None) | ||
520 | } | ||
521 | |||
522 | # If this CVE is fixed upstream, skip it unless all CVEs are | ||
523 | # specified. | ||
524 | if ( | ||
525 | include_vex != "all" | ||
526 | and "detail" in decoded_status | ||
527 | and decoded_status["detail"] | ||
528 | in ( | ||
529 | "fixed-version", | ||
530 | "cpe-stable-backport", | ||
531 | ) | ||
532 | ): | ||
533 | bb.debug(1, "Skipping %s since it is already fixed upstream" % cve) | ||
534 | continue | ||
535 | |||
536 | spdx_cve = build_objset.new_cve_vuln(cve) | ||
537 | build_objset.set_element_alias(spdx_cve) | ||
538 | |||
539 | cve_by_status.setdefault(decoded_status["mapping"], {})[cve] = ( | ||
540 | spdx_cve, | ||
541 | decoded_status["detail"], | ||
542 | decoded_status["description"], | ||
543 | ) | ||
544 | |||
545 | cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION")) | ||
546 | |||
547 | source_files = add_download_files(d, build_objset) | ||
548 | build_inputs |= source_files | ||
549 | |||
550 | recipe_spdx_license = add_license_expression( | ||
551 | d, build_objset, d.getVar("LICENSE"), license_data | ||
552 | ) | ||
553 | build_objset.new_relationship( | ||
554 | source_files, | ||
555 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
556 | [oe.sbom30.get_element_link_id(recipe_spdx_license)], | ||
557 | ) | ||
558 | |||
559 | dep_sources = {} | ||
560 | if oe.spdx_common.process_sources(d) and include_sources: | ||
561 | bb.debug(1, "Adding source files to SPDX") | ||
562 | oe.spdx_common.get_patched_src(d) | ||
563 | |||
564 | files = add_package_files( | ||
565 | d, | ||
566 | build_objset, | ||
567 | spdx_workdir, | ||
568 | lambda file_counter: build_objset.new_spdxid( | ||
569 | "sourcefile", str(file_counter) | ||
570 | ), | ||
571 | lambda filepath: [oe.spdx30.software_SoftwarePurpose.source], | ||
572 | license_data, | ||
573 | ignore_dirs=[".git"], | ||
574 | ignore_top_level_dirs=["temp"], | ||
575 | archive=None, | ||
576 | ) | ||
577 | build_inputs |= files | ||
578 | index_sources_by_hash(files, dep_sources) | ||
579 | |||
580 | dep_objsets, dep_builds = collect_dep_objsets(d, build) | ||
581 | if dep_builds: | ||
582 | build_objset.new_scoped_relationship( | ||
583 | [build], | ||
584 | oe.spdx30.RelationshipType.dependsOn, | ||
585 | oe.spdx30.LifecycleScopeType.build, | ||
586 | sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds), | ||
587 | ) | ||
588 | |||
589 | debug_source_ids = set() | ||
590 | source_hash_cache = {} | ||
591 | |||
592 | # Write out the package SPDX data now. It is not complete as we cannot | ||
593 | # write the runtime data, so write it to a staging area and a later task | ||
594 | # will write out the final collection | ||
595 | |||
596 | # TODO: Handle native recipe output | ||
597 | if not is_native: | ||
598 | bb.debug(1, "Collecting Dependency sources files") | ||
599 | collect_dep_sources(dep_objsets, dep_sources) | ||
600 | |||
601 | bb.build.exec_func("read_subpackage_metadata", d) | ||
602 | |||
603 | pkgdest = Path(d.getVar("PKGDEST")) | ||
604 | for package in d.getVar("PACKAGES").split(): | ||
605 | if not oe.packagedata.packaged(package, d): | ||
606 | continue | ||
607 | |||
608 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
609 | |||
610 | bb.debug(1, "Creating SPDX for package %s" % pkg_name) | ||
611 | |||
612 | pkg_objset = oe.sbom30.ObjectSet.new_objset(d, "package-" + pkg_name) | ||
613 | |||
614 | spdx_package = pkg_objset.add_root( | ||
615 | oe.spdx30.software_Package( | ||
616 | _id=pkg_objset.new_spdxid("package", pkg_name), | ||
617 | creationInfo=pkg_objset.doc.creationInfo, | ||
618 | name=pkg_name, | ||
619 | software_packageVersion=d.getVar("SPDX_PACKAGE_VERSION"), | ||
620 | ) | ||
621 | ) | ||
622 | set_timestamp_now(d, spdx_package, "builtTime") | ||
623 | |||
624 | set_purposes( | ||
625 | d, | ||
626 | spdx_package, | ||
627 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package, | ||
628 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE", | ||
629 | force_purposes=["install"], | ||
630 | ) | ||
631 | |||
632 | supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER") | ||
633 | if supplier is not None: | ||
634 | spdx_package.suppliedBy = ( | ||
635 | supplier if isinstance(supplier, str) else supplier._id | ||
636 | ) | ||
637 | |||
638 | set_var_field( | ||
639 | "HOMEPAGE", spdx_package, "software_homePage", package=package | ||
640 | ) | ||
641 | set_var_field("SUMMARY", spdx_package, "summary", package=package) | ||
642 | set_var_field("DESCRIPTION", spdx_package, "description", package=package) | ||
643 | |||
644 | if d.getVar("SPDX_PACKAGE_URL:%s" % package) or d.getVar("SPDX_PACKAGE_URL"): | ||
645 | set_var_field( | ||
646 | "SPDX_PACKAGE_URL", | ||
647 | spdx_package, | ||
648 | "software_packageUrl", | ||
649 | package=package | ||
650 | ) | ||
651 | |||
652 | pkg_objset.new_scoped_relationship( | ||
653 | [oe.sbom30.get_element_link_id(build)], | ||
654 | oe.spdx30.RelationshipType.hasOutput, | ||
655 | oe.spdx30.LifecycleScopeType.build, | ||
656 | [spdx_package], | ||
657 | ) | ||
658 | |||
659 | for cpe_id in cpe_ids: | ||
660 | spdx_package.externalIdentifier.append( | ||
661 | oe.spdx30.ExternalIdentifier( | ||
662 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23, | ||
663 | identifier=cpe_id, | ||
664 | ) | ||
665 | ) | ||
666 | |||
667 | # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file | ||
668 | # generated and link it to the package | ||
669 | # spdx_package_file = pkg_objset.add(oe.spdx30.software_File( | ||
670 | # _id=pkg_objset.new_spdxid("distribution", pkg_name), | ||
671 | # creationInfo=pkg_objset.doc.creationInfo, | ||
672 | # name=pkg_name, | ||
673 | # software_primaryPurpose=spdx_package.software_primaryPurpose, | ||
674 | # software_additionalPurpose=spdx_package.software_additionalPurpose, | ||
675 | # )) | ||
676 | # set_timestamp_now(d, spdx_package_file, "builtTime") | ||
677 | |||
678 | ## TODO add hashes | ||
679 | # pkg_objset.new_relationship( | ||
680 | # [spdx_package], | ||
681 | # oe.spdx30.RelationshipType.hasDistributionArtifact, | ||
682 | # [spdx_package_file], | ||
683 | # ) | ||
684 | |||
685 | # NOTE: licenses live in the recipe collection and are referenced | ||
686 | # by ID in the package collection(s). This helps reduce duplication | ||
687 | # (since a lot of packages will have the same license), and also | ||
688 | # prevents duplicate license SPDX IDs in the packages | ||
689 | package_license = d.getVar("LICENSE:%s" % package) | ||
690 | if package_license and package_license != d.getVar("LICENSE"): | ||
691 | package_spdx_license = add_license_expression( | ||
692 | d, build_objset, package_license, license_data | ||
693 | ) | ||
694 | else: | ||
695 | package_spdx_license = recipe_spdx_license | ||
696 | |||
697 | pkg_objset.new_relationship( | ||
698 | [spdx_package], | ||
699 | oe.spdx30.RelationshipType.hasConcludedLicense, | ||
700 | [oe.sbom30.get_element_link_id(package_spdx_license)], | ||
701 | ) | ||
702 | |||
703 | # NOTE: CVE Elements live in the recipe collection | ||
704 | all_cves = set() | ||
705 | for status, cves in cve_by_status.items(): | ||
706 | for cve, items in cves.items(): | ||
707 | spdx_cve, detail, description = items | ||
708 | spdx_cve_id = oe.sbom30.get_element_link_id(spdx_cve) | ||
709 | |||
710 | all_cves.add(spdx_cve_id) | ||
711 | |||
712 | if status == "Patched": | ||
713 | pkg_objset.new_vex_patched_relationship( | ||
714 | [spdx_cve_id], [spdx_package] | ||
715 | ) | ||
716 | elif status == "Unpatched": | ||
717 | pkg_objset.new_vex_unpatched_relationship( | ||
718 | [spdx_cve_id], [spdx_package] | ||
719 | ) | ||
720 | elif status == "Ignored": | ||
721 | spdx_vex = pkg_objset.new_vex_ignored_relationship( | ||
722 | [spdx_cve_id], | ||
723 | [spdx_package], | ||
724 | impact_statement=description, | ||
725 | ) | ||
726 | |||
727 | if detail in ( | ||
728 | "ignored", | ||
729 | "cpe-incorrect", | ||
730 | "disputed", | ||
731 | "upstream-wontfix", | ||
732 | ): | ||
733 | # VEX doesn't have justifications for this | ||
734 | pass | ||
735 | elif detail in ( | ||
736 | "not-applicable-config", | ||
737 | "not-applicable-platform", | ||
738 | ): | ||
739 | for v in spdx_vex: | ||
740 | v.security_justificationType = ( | ||
741 | oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent | ||
742 | ) | ||
743 | else: | ||
744 | bb.fatal(f"Unknown detail '{detail}' for ignored {cve}") | ||
745 | elif status == "Unknown": | ||
746 | bb.note(f"Skipping {cve} with status 'Unknown'") | ||
747 | else: | ||
748 | bb.fatal(f"Unknown {cve} status '{status}'") | ||
749 | |||
750 | if all_cves: | ||
751 | pkg_objset.new_relationship( | ||
752 | [spdx_package], | ||
753 | oe.spdx30.RelationshipType.hasAssociatedVulnerability, | ||
754 | sorted(list(all_cves)), | ||
755 | ) | ||
756 | |||
757 | bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name) | ||
758 | package_files = add_package_files( | ||
759 | d, | ||
760 | pkg_objset, | ||
761 | pkgdest / package, | ||
762 | lambda file_counter: pkg_objset.new_spdxid( | ||
763 | "package", pkg_name, "file", str(file_counter) | ||
764 | ), | ||
765 | # TODO: Can we know the purpose here? | ||
766 | lambda filepath: [], | ||
767 | license_data, | ||
768 | ignore_top_level_dirs=["CONTROL", "DEBIAN"], | ||
769 | archive=None, | ||
770 | ) | ||
771 | |||
772 | if package_files: | ||
773 | pkg_objset.new_relationship( | ||
774 | [spdx_package], | ||
775 | oe.spdx30.RelationshipType.contains, | ||
776 | sorted(list(package_files)), | ||
777 | ) | ||
778 | |||
779 | if include_sources: | ||
780 | debug_sources = get_package_sources_from_debug( | ||
781 | d, package, package_files, dep_sources, source_hash_cache | ||
782 | ) | ||
783 | debug_source_ids |= set( | ||
784 | oe.sbom30.get_element_link_id(d) for d in debug_sources | ||
785 | ) | ||
786 | |||
787 | oe.sbom30.write_recipe_jsonld_doc( | ||
788 | d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False | ||
789 | ) | ||
790 | |||
791 | if include_sources: | ||
792 | bb.debug(1, "Adding sysroot files to SPDX") | ||
793 | sysroot_files = add_package_files( | ||
794 | d, | ||
795 | build_objset, | ||
796 | d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"), | ||
797 | lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)), | ||
798 | lambda filepath: [], | ||
799 | license_data, | ||
800 | archive=None, | ||
801 | ) | ||
802 | |||
803 | if sysroot_files: | ||
804 | build_objset.new_scoped_relationship( | ||
805 | [build], | ||
806 | oe.spdx30.RelationshipType.hasOutput, | ||
807 | oe.spdx30.LifecycleScopeType.build, | ||
808 | sorted(list(sysroot_files)), | ||
809 | ) | ||
810 | |||
811 | if build_inputs or debug_source_ids: | ||
812 | build_objset.new_scoped_relationship( | ||
813 | [build], | ||
814 | oe.spdx30.RelationshipType.hasInput, | ||
815 | oe.spdx30.LifecycleScopeType.build, | ||
816 | sorted(list(build_inputs)) + sorted(list(debug_source_ids)), | ||
817 | ) | ||
818 | |||
819 | oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir) | ||
820 | |||
821 | |||
822 | def create_package_spdx(d): | ||
823 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
824 | deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY")) | ||
825 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class( | ||
826 | "cross", d | ||
827 | ) | ||
828 | |||
829 | providers = oe.spdx_common.collect_package_providers(d) | ||
830 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
831 | |||
832 | if is_native: | ||
833 | return | ||
834 | |||
835 | bb.build.exec_func("read_subpackage_metadata", d) | ||
836 | |||
837 | dep_package_cache = {} | ||
838 | |||
839 | # Any element common to all packages that needs to be referenced by ID | ||
840 | # should be written into this common object set | ||
841 | common_objset = oe.sbom30.ObjectSet.new_objset( | ||
842 | d, "%s-package-common" % d.getVar("PN") | ||
843 | ) | ||
844 | |||
845 | pkgdest = Path(d.getVar("PKGDEST")) | ||
846 | for package in d.getVar("PACKAGES").split(): | ||
847 | localdata = bb.data.createCopy(d) | ||
848 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
849 | localdata.setVar("PKG", pkg_name) | ||
850 | localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package) | ||
851 | |||
852 | if not oe.packagedata.packaged(package, localdata): | ||
853 | continue | ||
854 | |||
855 | spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld( | ||
856 | d, | ||
857 | pkg_arch, | ||
858 | "packages-staging", | ||
859 | "package-" + pkg_name, | ||
860 | oe.spdx30.software_Package, | ||
861 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
862 | ) | ||
863 | |||
864 | # We will write out a new collection, so link it to the new | ||
865 | # creation info in the common package data. The old creation info | ||
866 | # should still exist and be referenced by all the existing elements | ||
867 | # in the package | ||
868 | pkg_objset.creationInfo = pkg_objset.copy_creation_info( | ||
869 | common_objset.doc.creationInfo | ||
870 | ) | ||
871 | |||
872 | runtime_spdx_deps = set() | ||
873 | |||
874 | deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "") | ||
875 | seen_deps = set() | ||
876 | for dep, _ in deps.items(): | ||
877 | if dep in seen_deps: | ||
878 | continue | ||
879 | |||
880 | if dep not in providers: | ||
881 | continue | ||
882 | |||
883 | (dep, _) = providers[dep] | ||
884 | |||
885 | if not oe.packagedata.packaged(dep, localdata): | ||
886 | continue | ||
887 | |||
888 | dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d) | ||
889 | dep_pkg = dep_pkg_data["PKG"] | ||
890 | |||
891 | if dep in dep_package_cache: | ||
892 | dep_spdx_package = dep_package_cache[dep] | ||
893 | else: | ||
894 | bb.debug(1, "Searching for %s" % dep_pkg) | ||
895 | dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
896 | d, | ||
897 | "packages-staging", | ||
898 | "package-" + dep_pkg, | ||
899 | oe.spdx30.software_Package, | ||
900 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
901 | ) | ||
902 | dep_package_cache[dep] = dep_spdx_package | ||
903 | |||
904 | runtime_spdx_deps.add(dep_spdx_package) | ||
905 | seen_deps.add(dep) | ||
906 | |||
907 | if runtime_spdx_deps: | ||
908 | pkg_objset.new_scoped_relationship( | ||
909 | [spdx_package], | ||
910 | oe.spdx30.RelationshipType.dependsOn, | ||
911 | oe.spdx30.LifecycleScopeType.runtime, | ||
912 | [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps], | ||
913 | ) | ||
914 | |||
915 | oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir) | ||
916 | |||
917 | oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir) | ||
918 | |||
919 | |||
920 | def write_bitbake_spdx(d): | ||
921 | # Set PN to "bitbake" so that SPDX IDs can be generated | ||
922 | d.setVar("PN", "bitbake") | ||
923 | d.setVar("BB_TASKHASH", "bitbake") | ||
924 | oe.spdx_common.load_spdx_license_data(d) | ||
925 | |||
926 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
927 | |||
928 | objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False) | ||
929 | |||
930 | host_import_key = d.getVar("SPDX_BUILD_HOST") | ||
931 | invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False) | ||
932 | on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False) | ||
933 | |||
934 | if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1": | ||
935 | # Since the Build objects are unique, we may as well set the creation | ||
936 | # time to the current time instead of the SOURCE_DATE_EPOCH fallback | ||
937 | objset.doc.creationInfo.created = datetime.now(timezone.utc) | ||
938 | |||
939 | # Each invocation of bitbake should have a unique ID since it is a | ||
940 | # unique build | ||
941 | nonce = os.urandom(16).hex() | ||
942 | |||
943 | build = objset.add_root( | ||
944 | oe.spdx30.build_Build( | ||
945 | _id=objset.new_spdxid(nonce, include_unihash=False), | ||
946 | creationInfo=objset.doc.creationInfo, | ||
947 | build_buildType=oe.sbom30.SPDX_BUILD_TYPE, | ||
948 | ) | ||
949 | ) | ||
950 | set_timestamp_now(d, build, "build_buildStartTime") | ||
951 | |||
952 | if host_import_key: | ||
953 | objset.new_scoped_relationship( | ||
954 | [build], | ||
955 | oe.spdx30.RelationshipType.hasHost, | ||
956 | oe.spdx30.LifecycleScopeType.build, | ||
957 | [objset.new_import(host_import_key)], | ||
958 | ) | ||
959 | |||
960 | if invoked_by: | ||
961 | objset.add(invoked_by) | ||
962 | invoked_by_spdx = objset.new_scoped_relationship( | ||
963 | [build], | ||
964 | oe.spdx30.RelationshipType.invokedBy, | ||
965 | oe.spdx30.LifecycleScopeType.build, | ||
966 | [invoked_by], | ||
967 | ) | ||
968 | |||
969 | if on_behalf_of: | ||
970 | objset.add(on_behalf_of) | ||
971 | objset.new_scoped_relationship( | ||
972 | [on_behalf_of], | ||
973 | oe.spdx30.RelationshipType.delegatedTo, | ||
974 | oe.spdx30.LifecycleScopeType.build, | ||
975 | invoked_by_spdx, | ||
976 | ) | ||
977 | |||
978 | elif on_behalf_of: | ||
979 | bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set") | ||
980 | |||
981 | else: | ||
982 | if host_import_key: | ||
983 | bb.warn( | ||
984 | "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
985 | ) | ||
986 | |||
987 | if invoked_by: | ||
988 | bb.warn( | ||
989 | "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
990 | ) | ||
991 | |||
992 | if on_behalf_of: | ||
993 | bb.warn( | ||
994 | "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
995 | ) | ||
996 | |||
997 | for obj in objset.foreach_type(oe.spdx30.Element): | ||
998 | obj.extension.append(oe.sbom30.OEIdAliasExtension()) | ||
999 | |||
1000 | oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json") | ||
1001 | |||
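
The parent-build branch above is purely configuration-driven. A minimal sketch of enabling it from local.conf (the variable names are the ones the code reads; the host key value is a placeholder for an import that objset.new_import() can resolve):

    SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "1"
    SPDX_BUILD_HOST = "my-host-key"

When SPDX_INCLUDE_BITBAKE_PARENT_BUILD is unset, execution falls through to the warnings below, so SPDX_BUILD_HOST, SPDX_INVOKED_BY and SPDX_ON_BEHALF_OF have no effect.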
1002 | |||
1003 | def collect_build_package_inputs(d, objset, build, packages, files_by_hash=None): | ||
1004 | import oe.sbom30 | ||
1005 | |||
1006 | providers = oe.spdx_common.collect_package_providers(d) | ||
1007 | |||
1008 | build_deps = set() | ||
1009 | missing_providers = set() | ||
1010 | |||
1011 | for name in sorted(packages.keys()): | ||
1012 | if name not in providers: | ||
1013 | missing_providers.add(name) | ||
1014 | continue | ||
1015 | |||
1016 | pkg_name, pkg_hashfn = providers[name] | ||
1017 | |||
1018 | # Copy all of the package SPDX files into the Sbom elements | ||
1019 | pkg_spdx, pkg_objset = oe.sbom30.find_root_obj_in_jsonld( | ||
1020 | d, | ||
1021 | "packages", | ||
1022 | "package-" + pkg_name, | ||
1023 | oe.spdx30.software_Package, | ||
1024 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
1025 | ) | ||
1026 | build_deps.add(oe.sbom30.get_element_link_id(pkg_spdx)) | ||
1027 | |||
1028 | if files_by_hash is not None: | ||
1029 | for h, f in pkg_objset.by_sha256_hash.items(): | ||
1030 | files_by_hash.setdefault(h, set()).update(f) | ||
1031 | |||
1032 | if missing_providers: | ||
1033 | bb.fatal( | ||
1034 | f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}" | ||
1035 | ) | ||
1036 | |||
1037 | if build_deps: | ||
1038 | objset.new_scoped_relationship( | ||
1039 | [build], | ||
1040 | oe.spdx30.RelationshipType.hasInput, | ||
1041 | oe.spdx30.LifecycleScopeType.build, | ||
1042 | sorted(list(build_deps)), | ||
1043 | ) | ||
1044 | |||
1045 | |||
1046 | def create_rootfs_spdx(d): | ||
1047 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
1048 | deploydir = Path(d.getVar("SPDXROOTFSDEPLOY")) | ||
1049 | root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES")) | ||
1050 | image_basename = d.getVar("IMAGE_BASENAME") | ||
1051 | image_rootfs = d.getVar("IMAGE_ROOTFS") | ||
1052 | machine = d.getVar("MACHINE") | ||
1053 | |||
1054 | with root_packages_file.open("r") as f: | ||
1055 | packages = json.load(f) | ||
1056 | |||
1057 | objset = oe.sbom30.ObjectSet.new_objset( | ||
1058 | d, "%s-%s-rootfs" % (image_basename, machine) | ||
1059 | ) | ||
1060 | |||
1061 | rootfs = objset.add_root( | ||
1062 | oe.spdx30.software_Package( | ||
1063 | _id=objset.new_spdxid("rootfs", image_basename), | ||
1064 | creationInfo=objset.doc.creationInfo, | ||
1065 | name=image_basename, | ||
1066 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
1067 | ) | ||
1068 | ) | ||
1069 | set_timestamp_now(d, rootfs, "builtTime") | ||
1070 | |||
1071 | rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs")) | ||
1072 | set_timestamp_now(d, rootfs_build, "build_buildEndTime") | ||
1073 | |||
1074 | objset.new_scoped_relationship( | ||
1075 | [rootfs_build], | ||
1076 | oe.spdx30.RelationshipType.hasOutput, | ||
1077 | oe.spdx30.LifecycleScopeType.build, | ||
1078 | [rootfs], | ||
1079 | ) | ||
1080 | |||
1081 | files_by_hash = {} | ||
1082 | collect_build_package_inputs(d, objset, rootfs_build, packages, files_by_hash) | ||
1083 | |||
1084 | files = set() | ||
1085 | for dirpath, dirnames, filenames in os.walk(image_rootfs, onerror=walk_error): | ||
1086 | dirnames.sort() | ||
1087 | filenames.sort() | ||
1088 | for fn in filenames: | ||
1089 | fpath = Path(dirpath) / fn | ||
1090 | if fpath.is_symlink() or not fpath.is_file(): | ||
1091 | continue | ||
1092 | |||
1093 | relpath = str(fpath.relative_to(image_rootfs)) | ||
1094 | h = bb.utils.sha256_file(fpath) | ||
1095 | |||
1096 | found = False | ||
1097 | if h in files_by_hash: | ||
1098 | for f in files_by_hash[h]: | ||
1099 | if isinstance(f, oe.spdx30.software_File) and f.name == relpath: | ||
1100 | files.add(oe.sbom30.get_element_link_id(f)) | ||
1101 | found = True | ||
1102 | break | ||
1103 | |||
1104 | if not found: | ||
1105 | files.add( | ||
1106 | objset.new_file( | ||
1107 | objset.new_spdxid("rootfs-file", relpath), | ||
1108 | relpath, | ||
1109 | fpath, | ||
1110 | ) | ||
1111 | ) | ||
1112 | |||
1113 | if files: | ||
1114 | objset.new_relationship( | ||
1115 | [rootfs], | ||
1116 | oe.spdx30.RelationshipType.contains, | ||
1117 | sorted(list(files)), | ||
1118 | ) | ||
1119 | |||
1120 | oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir) | ||
1121 | |||
1122 | |||
1123 | def create_image_spdx(d): | ||
1124 | import oe.sbom30 | ||
1125 | |||
1126 | image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR")) | ||
1127 | manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST")) | ||
1128 | spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK")) | ||
1129 | |||
1130 | image_basename = d.getVar("IMAGE_BASENAME") | ||
1131 | machine = d.getVar("MACHINE") | ||
1132 | |||
1133 | objset = oe.sbom30.ObjectSet.new_objset( | ||
1134 | d, "%s-%s-image" % (image_basename, machine) | ||
1135 | ) | ||
1136 | |||
1137 | with manifest_path.open("r") as f: | ||
1138 | manifest = json.load(f) | ||
1139 | |||
1140 | builds = [] | ||
1141 | for task in manifest: | ||
1142 | imagetype = task["imagetype"] | ||
1143 | taskname = task["taskname"] | ||
1144 | |||
1145 | image_build = objset.add_root( | ||
1146 | objset.new_task_build(taskname, "image/%s" % imagetype) | ||
1147 | ) | ||
1148 | set_timestamp_now(d, image_build, "build_buildEndTime") | ||
1149 | builds.append(image_build) | ||
1150 | |||
1151 | artifacts = [] | ||
1152 | |||
1153 | for image in task["images"]: | ||
1154 | image_filename = image["filename"] | ||
1155 | image_path = image_deploy_dir / image_filename | ||
1156 | if os.path.isdir(image_path): | ||
1157 | a = add_package_files( | ||
1158 | d, | ||
1159 | objset, | ||
1160 | image_path, | ||
1161 | lambda file_counter: objset.new_spdxid( | ||
1162 | "imagefile", str(file_counter) | ||
1163 | ), | ||
1164 | lambda filepath: [], | ||
1165 | license_data=None, | ||
1166 | ignore_dirs=[], | ||
1167 | ignore_top_level_dirs=[], | ||
1168 | archive=None, | ||
1169 | ) | ||
1170 | artifacts.extend(a) | ||
1171 | else: | ||
1172 | a = objset.add_root( | ||
1173 | oe.spdx30.software_File( | ||
1174 | _id=objset.new_spdxid("image", image_filename), | ||
1175 | creationInfo=objset.doc.creationInfo, | ||
1176 | name=image_filename, | ||
1177 | verifiedUsing=[ | ||
1178 | oe.spdx30.Hash( | ||
1179 | algorithm=oe.spdx30.HashAlgorithm.sha256, | ||
1180 | hashValue=bb.utils.sha256_file(image_path), | ||
1181 | ) | ||
1182 | ], | ||
1183 | ) | ||
1184 | ) | ||
1185 | |||
1186 | artifacts.append(a) | ||
1187 | |||
1188 | for a in artifacts: | ||
1189 | set_purposes( | ||
1190 | d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE" | ||
1191 | ) | ||
1192 | |||
1193 | set_timestamp_now(d, a, "builtTime") | ||
1194 | |||
1195 | |||
1196 | if artifacts: | ||
1197 | objset.new_scoped_relationship( | ||
1198 | [image_build], | ||
1199 | oe.spdx30.RelationshipType.hasOutput, | ||
1200 | oe.spdx30.LifecycleScopeType.build, | ||
1201 | artifacts, | ||
1202 | ) | ||
1203 | |||
1204 | if builds: | ||
1205 | rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
1206 | d, | ||
1207 | "rootfs", | ||
1208 | "%s-%s-rootfs" % (image_basename, machine), | ||
1209 | oe.spdx30.software_Package, | ||
1210 | # TODO: Should we use a purpose to filter here? | ||
1211 | ) | ||
1212 | objset.new_scoped_relationship( | ||
1213 | builds, | ||
1214 | oe.spdx30.RelationshipType.hasInput, | ||
1215 | oe.spdx30.LifecycleScopeType.build, | ||
1216 | [oe.sbom30.get_element_link_id(rootfs_image)], | ||
1217 | ) | ||
1218 | |||
1219 | objset.add_aliases() | ||
1220 | objset.link() | ||
1221 | oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir) | ||
1222 | |||
1223 | |||
1224 | def create_image_sbom_spdx(d): | ||
1225 | import oe.sbom30 | ||
1226 | |||
1227 | image_name = d.getVar("IMAGE_NAME") | ||
1228 | image_basename = d.getVar("IMAGE_BASENAME") | ||
1229 | image_link_name = d.getVar("IMAGE_LINK_NAME") | ||
1230 | imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR")) | ||
1231 | machine = d.getVar("MACHINE") | ||
1232 | |||
1233 | spdx_path = imgdeploydir / (image_name + ".spdx.json") | ||
1234 | |||
1235 | root_elements = [] | ||
1236 | |||
1237 | # TODO: Do we need to add the rootfs or are the image files sufficient? | ||
1238 | rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
1239 | d, | ||
1240 | "rootfs", | ||
1241 | "%s-%s-rootfs" % (image_basename, machine), | ||
1242 | oe.spdx30.software_Package, | ||
1243 | # TODO: Should we use a purpose here? | ||
1244 | ) | ||
1245 | root_elements.append(oe.sbom30.get_element_link_id(rootfs_image)) | ||
1246 | |||
1247 | image_objset, _ = oe.sbom30.find_jsonld( | ||
1248 | d, "image", "%s-%s-image" % (image_basename, machine), required=True | ||
1249 | ) | ||
1250 | for o in image_objset.foreach_root(oe.spdx30.software_File): | ||
1251 | root_elements.append(oe.sbom30.get_element_link_id(o)) | ||
1252 | |||
1253 | objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements) | ||
1254 | |||
1255 | oe.sbom30.write_jsonld_doc(d, objset, spdx_path) | ||
1256 | |||
1257 | def make_image_link(target_path, suffix): | ||
1258 | if image_link_name: | ||
1259 | link = imgdeploydir / (image_link_name + suffix) | ||
1260 | if link != target_path: | ||
1261 | link.symlink_to(os.path.relpath(target_path, link.parent)) | ||
1262 | |||
1263 | make_image_link(spdx_path, ".spdx.json") | ||
1264 | |||
1265 | |||
1266 | def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname): | ||
1267 | sdk_name = toolchain_outputname + "-" + sdk_type | ||
1268 | sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target") | ||
1269 | |||
1270 | objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name) | ||
1271 | |||
1272 | sdk_rootfs = objset.add_root( | ||
1273 | oe.spdx30.software_Package( | ||
1274 | _id=objset.new_spdxid("sdk-rootfs", sdk_name), | ||
1275 | creationInfo=objset.doc.creationInfo, | ||
1276 | name=sdk_name, | ||
1277 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
1278 | ) | ||
1279 | ) | ||
1280 | set_timestamp_now(d, sdk_rootfs, "builtTime") | ||
1281 | |||
1282 | sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs")) | ||
1283 | set_timestamp_now(d, sdk_build, "build_buildEndTime") | ||
1284 | |||
1285 | objset.new_scoped_relationship( | ||
1286 | [sdk_build], | ||
1287 | oe.spdx30.RelationshipType.hasOutput, | ||
1288 | oe.spdx30.LifecycleScopeType.build, | ||
1289 | [sdk_rootfs], | ||
1290 | ) | ||
1291 | |||
1292 | collect_build_package_inputs(d, objset, sdk_build, sdk_packages) | ||
1293 | |||
1294 | objset.add_aliases() | ||
1295 | oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json") | ||
1296 | |||
1297 | |||
1298 | def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname): | ||
1299 | # Load the document written earlier | ||
1300 | rootfs_objset = oe.sbom30.load_jsonld( | ||
1301 | d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True | ||
1302 | ) | ||
1303 | |||
1304 | # Create a new build for the SDK installer | ||
1305 | sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate") | ||
1306 | set_timestamp_now(d, sdk_build, "build_buildEndTime") | ||
1307 | |||
1308 | rootfs = rootfs_objset.find_root(oe.spdx30.software_Package) | ||
1309 | if rootfs is None: | ||
1310 | bb.fatal("Unable to find rootfs artifact") | ||
1311 | |||
1312 | rootfs_objset.new_scoped_relationship( | ||
1313 | [sdk_build], | ||
1314 | oe.spdx30.RelationshipType.hasInput, | ||
1315 | oe.spdx30.LifecycleScopeType.build, | ||
1316 | [rootfs], | ||
1317 | ) | ||
1318 | |||
1319 | files = set() | ||
1320 | root_files = [] | ||
1321 | |||
322 | # NOTE: os.walk() does not follow directory symlinks; file symlinks are skipped below | ||
1323 | for dirpath, dirnames, filenames in os.walk(sdk_deploydir, onerror=walk_error): | ||
1324 | dirnames.sort() | ||
1325 | filenames.sort() | ||
1326 | for fn in filenames: | ||
1327 | fpath = Path(dirpath) / fn | ||
1328 | if not fpath.is_file() or fpath.is_symlink(): | ||
1329 | continue | ||
1330 | |||
1331 | relpath = str(fpath.relative_to(sdk_deploydir)) | ||
1332 | |||
1333 | f = rootfs_objset.new_file( | ||
1334 | rootfs_objset.new_spdxid("sdk-installer", relpath), | ||
1335 | relpath, | ||
1336 | fpath, | ||
1337 | ) | ||
1338 | set_timestamp_now(d, f, "builtTime") | ||
1339 | |||
1340 | if fn.endswith(".manifest"): | ||
1341 | f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest | ||
1342 | elif fn.endswith(".testdata.json"): | ||
1343 | f.software_primaryPurpose = ( | ||
1344 | oe.spdx30.software_SoftwarePurpose.configuration | ||
1345 | ) | ||
1346 | else: | ||
1347 | set_purposes(d, f, "SPDX_SDK_PURPOSE") | ||
1348 | root_files.append(f) | ||
1349 | |||
1350 | files.add(f) | ||
1351 | |||
1352 | if files: | ||
1353 | rootfs_objset.new_scoped_relationship( | ||
1354 | [sdk_build], | ||
1355 | oe.spdx30.RelationshipType.hasOutput, | ||
1356 | oe.spdx30.LifecycleScopeType.build, | ||
1357 | files, | ||
1358 | ) | ||
1359 | else: | ||
1360 | bb.warn(f"No SDK output files found in {sdk_deploydir}") | ||
1361 | |||
1362 | objset, sbom = oe.sbom30.create_sbom( | ||
1363 | d, toolchain_outputname, sorted(list(files)), [rootfs_objset] | ||
1364 | ) | ||
1365 | |||
1366 | oe.sbom30.write_jsonld_doc( | ||
1367 | d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json") | ||
1368 | ) | ||
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py new file mode 100644 index 0000000000..c2dec65563 --- /dev/null +++ b/meta/lib/oe/spdx_common.py | |||
@@ -0,0 +1,285 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import bb | ||
8 | import collections | ||
9 | import json | ||
10 | import os | ||
11 | import oe.packagedata | ||
12 | import re | ||
13 | import shutil | ||
13 | |||
14 | from pathlib import Path | ||
15 | from dataclasses import dataclass | ||
16 | |||
17 | LIC_REGEX = re.compile( | ||
18 | rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$", | ||
19 | re.MULTILINE, | ||
20 | ) | ||
21 | |||
22 | |||
23 | def extract_licenses(filename): | ||
24 | """ | ||
25 | Extract SPDX License identifiers from a file | ||
26 | """ | ||
27 | try: | ||
28 | with open(filename, "rb") as f: | ||
29 | size = min(15000, os.stat(filename).st_size) | ||
30 | txt = f.read(size) | ||
31 | licenses = re.findall(LIC_REGEX, txt) | ||
32 | if licenses: | ||
33 | ascii_licenses = [lic.decode("ascii") for lic in licenses] | ||
34 | return ascii_licenses | ||
35 | except Exception as e: | ||
36 | bb.warn(f"Exception reading {filename}: {e}") | ||
37 | return [] | ||
38 | |||
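
As a quick illustration of what LIC_REGEX above matches (a sketch assuming the module context; the sample header is invented):

    sample = b"/* SPDX-License-Identifier: GPL-2.0-only OR MIT */\n"
    re.findall(LIC_REGEX, sample)
    # -> [b'GPL-2.0-only OR MIT']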
39 | |||
40 | def is_work_shared_spdx(d): | ||
41 | return '/work-shared/' in d.getVar('S') | ||
42 | |||
43 | |||
44 | def load_spdx_license_data(d): | ||
45 | with open(d.getVar("SPDX_LICENSES"), "r") as f: | ||
46 | data = json.load(f) | ||
47 | # Transform the license array to a dictionary | ||
48 | data["licenses"] = {l["licenseId"]: l for l in data["licenses"]} | ||
49 | |||
50 | return data | ||
51 | |||
52 | |||
53 | def process_sources(d): | ||
54 | """ | ||
55 | Returns True if the sources for this recipe should be included in the SPDX | ||
56 | or False if not | ||
57 | """ | ||
58 | pn = d.getVar("PN") | ||
59 | assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split() | ||
60 | if pn in assume_provided: | ||
61 | for p in d.getVar("PROVIDES").split(): | ||
62 | if p != pn: | ||
63 | pn = p | ||
64 | break | ||
65 | |||
66 | # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted, | ||
67 | # so avoid archiving source here. | ||
68 | if pn.startswith("glibc-locale"): | ||
69 | return False | ||
70 | if d.getVar("PN") == "libtool-cross": | ||
71 | return False | ||
72 | if d.getVar("PN") == "libgcc-initial": | ||
73 | return False | ||
74 | if d.getVar("PN") == "shadow-sysroot": | ||
75 | return False | ||
76 | |||
77 | return True | ||
78 | |||
79 | |||
80 | @dataclass(frozen=True) | ||
81 | class Dep(object): | ||
82 | pn: str | ||
83 | hashfn: str | ||
84 | in_taskhash: bool | ||
85 | |||
86 | |||
87 | def collect_direct_deps(d, dep_task): | ||
88 | """ | ||
89 | Find direct dependencies of current task | ||
90 | |||
91 | Returns the list of recipes that have a dep_task that the current task | ||
92 | depends on | ||
93 | """ | ||
94 | current_task = "do_" + d.getVar("BB_CURRENTTASK") | ||
95 | pn = d.getVar("PN") | ||
96 | |||
97 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
98 | |||
99 | for this_dep in taskdepdata.values(): | ||
100 | if this_dep[0] == pn and this_dep[1] == current_task: | ||
101 | break | ||
102 | else: | ||
103 | bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata") | ||
104 | |||
105 | deps = set() | ||
106 | |||
107 | for dep_name in this_dep.deps: | ||
108 | dep_data = taskdepdata[dep_name] | ||
109 | if dep_data.taskname == dep_task and dep_data.pn != pn: | ||
110 | deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps)) | ||
111 | |||
112 | return sorted(deps) | ||
113 | |||
114 | |||
115 | def get_spdx_deps(d): | ||
116 | """ | ||
117 | Reads the SPDX dependencies JSON file and returns the data | ||
118 | """ | ||
119 | spdx_deps_file = Path(d.getVar("SPDXDEPS")) | ||
120 | |||
121 | deps = [] | ||
122 | with spdx_deps_file.open("r") as f: | ||
123 | for entry in json.load(f): | ||
124 | deps.append(Dep(*entry)) | ||
125 | return deps | ||
126 | |||
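
The SPDXDEPS file read above is plain JSON; each entry expands positionally into the Dep dataclass. A hypothetical file (hashes invented):

    [["zlib", "deadbeef...", true],
     ["openssl", "cafef00d...", false]]

so Dep(*entry) yields, e.g., Dep(pn="zlib", hashfn="deadbeef...", in_taskhash=True).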
127 | |||
128 | def collect_package_providers(d): | ||
129 | """ | ||
130 | Returns a dictionary where each RPROVIDES is mapped to the package that | ||
131 | provides it | ||
132 | """ | ||
133 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
134 | |||
135 | providers = {} | ||
136 | |||
137 | deps = collect_direct_deps(d, "do_create_spdx") | ||
138 | deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True)) | ||
139 | |||
140 | for dep_pn, dep_hashfn, _ in deps: | ||
141 | localdata = d | ||
142 | recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) | ||
143 | if not recipe_data: | ||
144 | localdata = bb.data.createCopy(d) | ||
145 | localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}") | ||
146 | recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) | ||
147 | |||
148 | for pkg in recipe_data.get("PACKAGES", "").split(): | ||
149 | pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata) | ||
150 | rprovides = set( | ||
151 | n | ||
152 | for n, _ in bb.utils.explode_dep_versions2( | ||
153 | pkg_data.get("RPROVIDES", "") | ||
154 | ).items() | ||
155 | ) | ||
156 | rprovides.add(pkg) | ||
157 | |||
158 | if "PKG" in pkg_data: | ||
159 | pkg = pkg_data["PKG"] | ||
160 | rprovides.add(pkg) | ||
161 | |||
162 | for r in rprovides: | ||
163 | providers[r] = (pkg, dep_hashfn) | ||
164 | |||
165 | return providers | ||
166 | |||
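
A sketch of the shape of the returned mapping (package names and hash are invented): with Debian-style renaming, both the recipe-internal package name and everything it RPROVIDES point at the renamed package:

    providers = {
        "zlib":  ("libz1", "<dep_hashfn>"),
        "libz1": ("libz1", "<dep_hashfn>"),
    }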
167 | |||
168 | def get_patched_src(d): | ||
169 | """ | ||
170 | Save patched source of the recipe in SPDX_WORKDIR. | ||
171 | """ | ||
172 | spdx_workdir = d.getVar("SPDXWORK") | ||
173 | spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE") | ||
174 | pn = d.getVar("PN") | ||
175 | |||
176 | workdir = d.getVar("WORKDIR") | ||
177 | |||
178 | try: | ||
179 | # The kernel class functions require it to be on work-shared, so we don't change WORKDIR | ||
180 | if not is_work_shared_spdx(d): | ||
181 | # Change the WORKDIR to make do_unpack do_patch run in another dir. | ||
182 | d.setVar("WORKDIR", spdx_workdir) | ||
183 | # Restore the original path to recipe's native sysroot (it's relative to WORKDIR). | ||
184 | d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) | ||
185 | |||
186 | # Changing 'WORKDIR' also changes 'B', so create the 'B' directory in | ||
187 | # case later tasks need it (e.g. some recipes' do_patch requires 'B' | ||
188 | # to exist). | ||
189 | bb.utils.mkdirhier(d.getVar("B")) | ||
190 | |||
191 | bb.build.exec_func("do_unpack", d) | ||
192 | |||
193 | if d.getVar("SRC_URI") != "": | ||
194 | if bb.data.inherits_class('dos2unix', d): | ||
195 | bb.build.exec_func('do_convert_crlf_to_lf', d) | ||
196 | bb.build.exec_func("do_patch", d) | ||
197 | |||
198 | # Copy source from work-share to spdx_workdir | ||
199 | if is_work_shared_spdx(d): | ||
200 | share_src = d.getVar('S') | ||
201 | d.setVar("WORKDIR", spdx_workdir) | ||
202 | d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) | ||
203 | # Copy the source to ${SPDXWORK}, using the same basename as ${S} | ||
204 | src_dir = ( | ||
205 | spdx_workdir | ||
206 | + "/" | ||
207 | + os.path.basename(share_src) | ||
208 | ) | ||
209 | # For kernel source, rename the suffix dir 'kernel-source' | ||
210 | # to ${BP} (${BPN}-${PV}) | ||
211 | if bb.data.inherits_class("kernel", d): | ||
212 | src_dir = spdx_workdir + "/" + d.getVar('BP') | ||
213 | |||
214 | bb.note(f"copyhardlinktree {share_src} to {src_dir}") | ||
215 | oe.path.copyhardlinktree(share_src, src_dir) | ||
216 | |||
217 | # Some userland recipes have no source. | ||
218 | if not os.path.exists(spdx_workdir): | ||
219 | bb.utils.mkdirhier(spdx_workdir) | ||
220 | finally: | ||
221 | d.setVar("WORKDIR", workdir) | ||
222 | |||
223 | |||
224 | def has_task(d, task): | ||
225 | return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False)) | ||
226 | |||
227 | |||
228 | def fetch_data_to_uri(fd, name): | ||
229 | """ | ||
230 | Translates a bitbake FetchData to a string URI | ||
231 | """ | ||
232 | uri = fd.type | ||
233 | # Map gitsm to git, since gitsm:// is not a valid URI protocol | ||
234 | if uri == "gitsm": | ||
235 | uri = "git" | ||
236 | proto = getattr(fd, "proto", None) | ||
237 | if proto is not None: | ||
238 | uri = uri + "+" + proto | ||
239 | uri = uri + "://" + fd.host + fd.path | ||
240 | |||
241 | if fd.method.supports_srcrev(): | ||
242 | uri = uri + "@" + fd.revision | ||
243 | |||
244 | return uri | ||
245 | |||
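
A sketch of the translation this performs, for a hypothetical gitsm fetch entry:

    # fd.type = "gitsm", fd.proto = "https",
    # fd.host = "github.com", fd.path = "/example/proj",
    # fd.revision = "abcd1234", supports_srcrev() -> True
    #
    # fetch_data_to_uri(fd, name)
    # -> "git+https://github.com/example/proj@abcd1234"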
246 | def is_compiled_source(filename, compiled_sources, types): | ||
247 | """ | ||
248 | Check whether the file counts as a compiled source file | ||
249 | """ | ||
250 | import os | ||
251 | # If we don't have a compiled-sources list, we assume all files are compiled. | ||
252 | if not compiled_sources: | ||
253 | return True | ||
254 | |||
255 | # We always return True if the file type is not in the list of compiled types. | ||
256 | # Some files in the source directory are not compiled, for example Makefiles, | ||
257 | # but also Python .py files. We still need to include them in the SPDX. | ||
258 | basename = os.path.basename(filename) | ||
259 | ext = basename.partition(".")[2] | ||
260 | if ext not in types: | ||
261 | return True | ||
262 | # Check that the file is in the list | ||
263 | return filename in compiled_sources | ||
264 | |||
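
Illustrative calls (the compiled-sources set and paths are invented):

    compiled = {"/src/proj/main.c"}
    types = {"c"}
    is_compiled_source("/src/proj/main.c", compiled, types)    # True: listed
    is_compiled_source("/src/proj/util.c", compiled, types)    # False: .c, but not listed
    is_compiled_source("/src/proj/Makefile", compiled, types)  # True: not a compiled type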
265 | def get_compiled_sources(d): | ||
266 | """ | ||
267 | Get list of compiled sources from debug information and normalize the paths | ||
268 | """ | ||
269 | import itertools | ||
270 | source_info = oe.package.read_debugsources_info(d) | ||
271 | if not source_info: | ||
272 | bb.debug(1, "Do not have debugsources.list. Skipping") | ||
273 | return [], [] | ||
274 | |||
275 | # Sources are currently not split per package in the SPDX, so aggregate them all | ||
276 | sources = set(itertools.chain.from_iterable(source_info.values())) | ||
277 | # Collect the set of file extensions present in the sources | ||
278 | types = set() | ||
279 | for src in sources: | ||
280 | basename = os.path.basename(src) | ||
281 | ext = basename.partition(".")[2] | ||
282 | if ext not in types and ext: | ||
283 | types.add(ext) | ||
284 | bb.debug(1, f"Number of sources: {len(sources)}, types: {len(types)} {types}") | ||
285 | return sources, types | ||
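
A sketch of how the two helpers above compose (the candidate file loop is illustrative, not from the patch):

    sources, types = get_compiled_sources(d)
    for src in candidate_files:  # hypothetical iterable of source paths
        if is_compiled_source(src, sources, types):
            ...  # include src in the SPDX source listing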
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index a46e5502ab..ef687f5d41 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py | |||
@@ -3,6 +3,7 @@ | |||
3 | # | 3 | # |
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | import bb.parse | ||
6 | import bb.siggen | 7 | import bb.siggen |
7 | import bb.runqueue | 8 | import bb.runqueue |
8 | import oe | 9 | import oe |
@@ -93,6 +94,14 @@ def sstate_lockedsigs(d): | |||
93 | sigs[pn][task] = [h, siggen_lockedsigs_var] | 94 | sigs[pn][task] = [h, siggen_lockedsigs_var] |
94 | return sigs | 95 | return sigs |
95 | 96 | ||
97 | def lockedsigs_unihashmap(d): | ||
98 | unihashmap = {} | ||
99 | data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split() | ||
100 | for entry in data: | ||
101 | pn, task, taskhash, unihash = entry.split(":") | ||
102 | unihashmap[(pn, task)] = (taskhash, unihash) | ||
103 | return unihashmap | ||
104 | |||
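
The SIGGEN_UNIHASHMAP entries parsed above are whitespace-separated strings of four colon-delimited fields, matching what dump_lockedsigs() writes further down. A hypothetical entry:

    SIGGEN_UNIHASHMAP += "zlib:do_compile:<taskhash>:<unihash>"

yields unihashmap[("zlib", "do_compile")] = ("<taskhash>", "<unihash>").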
96 | class SignatureGeneratorOEBasicHashMixIn(object): | 105 | class SignatureGeneratorOEBasicHashMixIn(object): |
97 | supports_multiconfig_datacaches = True | 106 | supports_multiconfig_datacaches = True |
98 | 107 | ||
@@ -100,6 +109,7 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
100 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() | 109 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() |
101 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() | 110 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() |
102 | self.lockedsigs = sstate_lockedsigs(data) | 111 | self.lockedsigs = sstate_lockedsigs(data) |
112 | self.unihashmap = lockedsigs_unihashmap(data) | ||
103 | self.lockedhashes = {} | 113 | self.lockedhashes = {} |
104 | self.lockedpnmap = {} | 114 | self.lockedpnmap = {} |
105 | self.lockedhashfn = {} | 115 | self.lockedhashfn = {} |
@@ -209,6 +219,15 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
209 | def get_cached_unihash(self, tid): | 219 | def get_cached_unihash(self, tid): |
210 | if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: | 220 | if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: |
211 | return self.lockedhashes[tid] | 221 | return self.lockedhashes[tid] |
222 | |||
223 | (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | ||
224 | recipename = self.lockedpnmap[fn] | ||
225 | |||
226 | if (recipename, task) in self.unihashmap: | ||
227 | taskhash, unihash = self.unihashmap[(recipename, task)] | ||
228 | if taskhash == self.taskhash[tid]: | ||
229 | return unihash | ||
230 | |||
212 | return super().get_cached_unihash(tid) | 231 | return super().get_cached_unihash(tid) |
213 | 232 | ||
214 | def dump_sigtask(self, fn, task, stampbase, runtime): | 233 | def dump_sigtask(self, fn, task, stampbase, runtime): |
@@ -219,6 +238,7 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
219 | 238 | ||
220 | def dump_lockedsigs(self, sigfile, taskfilter=None): | 239 | def dump_lockedsigs(self, sigfile, taskfilter=None): |
221 | types = {} | 240 | types = {} |
241 | unihashmap = {} | ||
222 | for tid in self.runtaskdeps: | 242 | for tid in self.runtaskdeps: |
223 | # Bitbake changed this to a tuple in newer versions | 243 | # Bitbake changed this to a tuple in newer versions |
224 | if isinstance(tid, tuple): | 244 | if isinstance(tid, tuple): |
@@ -226,13 +246,18 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
226 | if taskfilter: | 246 | if taskfilter: |
227 | if not tid in taskfilter: | 247 | if not tid in taskfilter: |
228 | continue | 248 | continue |
229 | fn = bb.runqueue.fn_from_tid(tid) | 249 | (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) |
230 | t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] | 250 | t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] |
231 | t = 't-' + t.replace('_', '-') | 251 | t = 't-' + t.replace('_', '-') |
232 | if t not in types: | 252 | if t not in types: |
233 | types[t] = [] | 253 | types[t] = [] |
234 | types[t].append(tid) | 254 | types[t].append(tid) |
235 | 255 | ||
256 | taskhash = self.taskhash[tid] | ||
257 | unihash = self.get_unihash(tid) | ||
258 | if taskhash != unihash: | ||
259 | unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash | ||
260 | |||
236 | with open(sigfile, "w") as f: | 261 | with open(sigfile, "w") as f: |
237 | l = sorted(types) | 262 | l = sorted(types) |
238 | for t in l: | 263 | for t in l: |
@@ -245,7 +270,12 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
245 | continue | 270 | continue |
246 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") | 271 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") |
247 | f.write(' "\n') | 272 | f.write(' "\n') |
248 | f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l))) | 273 | f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l))) |
274 | f.write('SIGGEN_UNIHASHMAP += "\\\n') | ||
275 | sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)]) | ||
276 | for tid in sortedtid: | ||
277 | f.write(unihashmap[tid] + " \\\n") | ||
278 | f.write(' "\n') | ||
249 | 279 | ||
250 | def dump_siglist(self, sigfile, path_prefix_strip=None): | 280 | def dump_siglist(self, sigfile, path_prefix_strip=None): |
251 | def strip_fn(fn): | 281 | def strip_fn(fn): |
@@ -327,7 +357,6 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge | |||
327 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') | 357 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') |
328 | if not self.method: | 358 | if not self.method: |
329 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") | 359 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") |
330 | self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1) | ||
331 | self.username = data.getVar("BB_HASHSERVE_USERNAME") | 360 | self.username = data.getVar("BB_HASHSERVE_USERNAME") |
332 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") | 361 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") |
333 | if not self.username or not self.password: | 362 | if not self.username or not self.password: |
@@ -371,7 +400,13 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
371 | return siginfo.rpartition('.')[2] | 400 | return siginfo.rpartition('.')[2] |
372 | 401 | ||
373 | def get_time(fullpath): | 402 | def get_time(fullpath): |
374 | return os.stat(fullpath).st_mtime | 403 | # NFS can end up in a weird state where the file exists but has no stat info. |
404 | # If that happens, we assume it doesn't actually exist and show a warning | ||
405 | try: | ||
406 | return os.stat(fullpath).st_mtime | ||
407 | except FileNotFoundError: | ||
408 | bb.warn("Could not obtain mtime for {}".format(fullpath)) | ||
409 | return None | ||
375 | 410 | ||
376 | # First search in stamps dir | 411 | # First search in stamps dir |
377 | localdata = d.createCopy() | 412 | localdata = d.createCopy() |
@@ -384,6 +419,9 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
384 | if pn.startswith("gcc-source"): | 419 | if pn.startswith("gcc-source"): |
385 | # gcc-source shared workdir is a special case :( | 420 | # gcc-source shared workdir is a special case :( |
386 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") | 421 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") |
422 | elif pn.startswith("llvm-project-source"): | ||
423 | # llvm-project-source shared workdir is also a special case :*( | ||
424 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}") | ||
387 | 425 | ||
388 | filespec = '%s.%s.sigdata.*' % (stamp, taskname) | 426 | filespec = '%s.%s.sigdata.*' % (stamp, taskname) |
389 | foundall = False | 427 | foundall = False |
@@ -394,13 +432,17 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
394 | if taskhashlist: | 432 | if taskhashlist: |
395 | for taskhash in taskhashlist: | 433 | for taskhash in taskhashlist: |
396 | if fullpath.endswith('.%s' % taskhash): | 434 | if fullpath.endswith('.%s' % taskhash): |
397 | hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)} | 435 | mtime = get_time(fullpath) |
436 | if mtime: | ||
437 | hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime} | ||
398 | if len(hashfiles) == len(taskhashlist): | 438 | if len(hashfiles) == len(taskhashlist): |
399 | foundall = True | 439 | foundall = True |
400 | break | 440 | break |
401 | else: | 441 | else: |
402 | hashval = get_hashval(fullpath) | 442 | hashval = get_hashval(fullpath) |
403 | hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)} | 443 | mtime = get_time(fullpath) |
444 | if mtime: | ||
445 | hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime} | ||
404 | 446 | ||
405 | if not taskhashlist or (len(hashfiles) < 2 and not foundall): | 447 | if not taskhashlist or (len(hashfiles) < 2 and not foundall): |
406 | # That didn't work, look in sstate-cache | 448 | # That didn't work, look in sstate-cache |
@@ -431,7 +473,9 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
431 | actual_hashval = get_hashval(fullpath) | 473 | actual_hashval = get_hashval(fullpath) |
432 | if actual_hashval in hashfiles: | 474 | if actual_hashval in hashfiles: |
433 | continue | 475 | continue |
434 | hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)} | 476 | mtime = get_time(fullpath) |
477 | if mtime: | ||
478 | hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime} | ||
435 | 479 | ||
436 | return hashfiles | 480 | return hashfiles |
437 | 481 | ||
@@ -450,6 +494,7 @@ def sstate_get_manifest_filename(task, d): | |||
450 | d2.setVar("SSTATE_MANMACH", extrainf) | 494 | d2.setVar("SSTATE_MANMACH", extrainf) |
451 | return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) | 495 | return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) |
452 | 496 | ||
497 | @bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS") | ||
453 | def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): | 498 | def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): |
454 | d2 = d | 499 | d2 = d |
455 | variant = '' | 500 | variant = '' |
@@ -524,6 +569,7 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
524 | if task == "package": | 569 | if task == "package": |
525 | include_timestamps = True | 570 | include_timestamps = True |
526 | include_root = False | 571 | include_root = False |
572 | source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH")) | ||
527 | hash_version = d.getVar('HASHEQUIV_HASH_VERSION') | 573 | hash_version = d.getVar('HASHEQUIV_HASH_VERSION') |
528 | extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA") | 574 | extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA") |
529 | 575 | ||
@@ -615,7 +661,11 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
615 | raise Exception(msg).with_traceback(e.__traceback__) | 661 | raise Exception(msg).with_traceback(e.__traceback__) |
616 | 662 | ||
617 | if include_timestamps: | 663 | if include_timestamps: |
618 | update_hash(" %10d" % s.st_mtime) | 664 | # Need to clamp to SOURCE_DATE_EPOCH |
665 | if s.st_mtime > source_date_epoch: | ||
666 | update_hash(" %10d" % source_date_epoch) | ||
667 | else: | ||
668 | update_hash(" %10d" % s.st_mtime) | ||
619 | 669 | ||
620 | update_hash(" ") | 670 | update_hash(" ") |
621 | if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): | 671 | if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): |
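
A worked illustration of the clamp above (epoch values invented): with SOURCE_DATE_EPOCH = 1700000000, a file whose st_mtime is 1800000000 contributes 1700000000 to the output hash, while a file at 1600000000 contributes its own mtime; this keeps package output hashes stable regardless of when the build actually ran.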
diff --git a/meta/lib/oe/tune.py b/meta/lib/oe/tune.py new file mode 100644 index 0000000000..7fda19430d --- /dev/null +++ b/meta/lib/oe/tune.py | |||
@@ -0,0 +1,81 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | # riscv_isa_to_tune(isa) | ||
8 | # | ||
9 | # Automatically translate a RISC-V ISA string to TUNE_FEATURES | ||
10 | # | ||
11 | # Abbreviations, such as rv32g -> rv32imafd_zicsr_zifencei, are supported. | ||
12 | # | ||
13 | # Profiles, such as rva22u64, are NOT supported, you must use ISA strings. | ||
14 | # | ||
15 | def riscv_isa_to_tune(isa): | ||
16 | _isa = isa.lower() | ||
17 | |||
18 | feature = [] | ||
19 | iter = 0 | ||
20 | |||
21 | # rv or riscv | ||
22 | if _isa[iter:].startswith('rv'): | ||
23 | feature.append('rv') | ||
24 | iter = iter + 2 | ||
25 | elif _isa[iter:].startswith('riscv'): | ||
26 | feature.append('rv') | ||
27 | iter = iter + 5 | ||
28 | else: | ||
29 | # Not a risc-v ISA! | ||
30 | return _isa | ||
31 | |||
32 | while (_isa[iter:]): | ||
33 | # Skip _ and whitespace | ||
34 | if _isa[iter] == '_' or _isa[iter].isspace(): | ||
35 | iter = iter + 1 | ||
36 | continue | ||
37 | |||
38 | # Length, just capture numbers here | ||
39 | if _isa[iter].isdigit(): | ||
40 | iter_end = iter | ||
41 | while iter_end < len(_isa) and _isa[iter_end].isdigit(): | ||
42 | iter_end = iter_end + 1 | ||
43 | |||
44 | feature.append(_isa[iter:iter_end]) | ||
45 | iter = iter_end | ||
46 | continue | ||
47 | |||
48 | # Typically i, e or g is next, followed by extensions. | ||
49 | # Extensions are single character, except for Z, Ss, Sh, Sm, Sv, and X | ||
50 | |||
51 | # If the extension starts with 'Z', 'S' or 'X', use the name until the next '_', whitespace or end | ||
52 | if _isa[iter] in ['z', 's', 'x']: | ||
53 | ext_type = _isa[iter] | ||
54 | iter_end = iter + 1 | ||
55 | |||
56 | # Multi-character extension; these must have a '_' before the next multi-character extension | ||
57 | # See 37.4 and 37.5: | ||
58 | # 37.4: Underscores "_" may be used to separate ISA extensions... | ||
59 | # 37.5: All multi-letter extensions ... must be separated from other multi-letter extensions by an underscore... | ||
60 | # Some extensions permit only alphabetic characters, while others allow alphanumeric characters | ||
61 | while iter_end < len(_isa) and _isa[iter_end] != "_" and not _isa[iter_end].isspace(): | ||
62 | iter_end = iter_end + 1 | ||
63 | |||
64 | feature.append(_isa[iter:iter_end]) | ||
65 | iter = iter_end | ||
66 | continue | ||
67 | |||
68 | # 'g' is special: it's an abbreviation for imafd_zicsr_zifencei | ||
69 | # When expanding the abbreviation, any additional letters must appear before the _z* extensions | ||
70 | if _isa[iter] == 'g': | ||
71 | _isa = 'imafd' + _isa[iter+1:] + '_zicsr_zifencei' | ||
72 | iter = 0 | ||
73 | continue | ||
74 | |||
75 | feature.append(_isa[iter]) | ||
76 | iter = iter + 1 | ||
77 | continue | ||
78 | |||
79 | # Eliminate duplicates, but preserve the order | ||
80 | feature = list(dict.fromkeys(feature)) | ||
81 | return ' '.join(feature) | ||
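
Worked examples of the translation (results derived by hand from the rules above):

    riscv_isa_to_tune("rv64gc")
    # 'g' expands to imafd plus '_zicsr_zifencei', giving:
    # "rv 64 i m a f d c zicsr zifencei"

    riscv_isa_to_tune("rv32imac_zicsr")
    # -> "rv 32 i m a c zicsr"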
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 14a7d07ef0..a11db5f3cd 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py | |||
@@ -9,6 +9,8 @@ import multiprocessing | |||
9 | import traceback | 9 | import traceback |
10 | import errno | 10 | import errno |
11 | 11 | ||
12 | import bb.parse | ||
13 | |||
12 | def read_file(filename): | 14 | def read_file(filename): |
13 | try: | 15 | try: |
14 | f = open( filename, "r" ) | 16 | f = open( filename, "r" ) |
@@ -265,6 +267,7 @@ def execute_pre_post_process(d, cmds): | |||
265 | bb.note("Executing %s ..." % cmd) | 267 | bb.note("Executing %s ..." % cmd) |
266 | bb.build.exec_func(cmd, d) | 268 | bb.build.exec_func(cmd, d) |
267 | 269 | ||
270 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
268 | def get_bb_number_threads(d): | 271 | def get_bb_number_threads(d): |
269 | return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1) | 272 | return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1) |
270 | 273 | ||
@@ -316,7 +319,9 @@ def multiprocess_launch_mp(target, items, max_process, extraargs=None): | |||
316 | items = list(items) | 319 | items = list(items) |
317 | while (items and not errors) or launched: | 320 | while (items and not errors) or launched: |
318 | if not errors and items and len(launched) < max_process: | 321 | if not errors and items and len(launched) < max_process: |
319 | args = (items.pop(),) | 322 | args = items.pop() |
323 | if not isinstance(args, tuple): | ||
324 | args = (args,) | ||
320 | if extraargs is not None: | 325 | if extraargs is not None: |
321 | args = args + extraargs | 326 | args = args + extraargs |
322 | p = ProcessLaunch(target=target, args=args) | 327 | p = ProcessLaunch(target=target, args=args) |
@@ -465,7 +470,7 @@ def host_gcc_version(d, taskcontextonly=False): | |||
465 | version = match.group(1) | 470 | version = match.group(1) |
466 | return "-%s" % version if version in ("4.8", "4.9") else "" | 471 | return "-%s" % version if version in ("4.8", "4.9") else "" |
467 | 472 | ||
468 | 473 | @bb.parse.vardepsexclude("DEFAULTTUNE_MULTILIB_ORIGINAL", "OVERRIDES") | |
469 | def get_multilib_datastore(variant, d): | 474 | def get_multilib_datastore(variant, d): |
470 | localdata = bb.data.createCopy(d) | 475 | localdata = bb.data.createCopy(d) |
471 | if variant: | 476 | if variant: |
@@ -482,19 +487,6 @@ def get_multilib_datastore(variant, d): | |||
482 | localdata.setVar("MLPREFIX", "") | 487 | localdata.setVar("MLPREFIX", "") |
483 | return localdata | 488 | return localdata |
484 | 489 | ||
485 | class ImageQAFailed(Exception): | ||
486 | def __init__(self, description, name=None, logfile=None): | ||
487 | self.description = description | ||
488 | self.name = name | ||
489 | self.logfile=logfile | ||
490 | |||
491 | def __str__(self): | ||
492 | msg = 'Function failed: %s' % self.name | ||
493 | if self.description: | ||
494 | msg = msg + ' (%s)' % self.description | ||
495 | |||
496 | return msg | ||
497 | |||
498 | def sh_quote(string): | 490 | def sh_quote(string): |
499 | import shlex | 491 | import shlex |
500 | return shlex.quote(string) | 492 | return shlex.quote(string) |
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/README b/meta/lib/oeqa/buildtools-docs/cases/README index f8edbc7dad..f8edbc7dad 100644 --- a/meta/lib/oeqa/sdk/buildtools-docs-cases/README +++ b/meta/lib/oeqa/buildtools-docs/cases/README | |||
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py b/meta/lib/oeqa/buildtools-docs/cases/build.py index 6e3ee94292..6e3ee94292 100644 --- a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py +++ b/meta/lib/oeqa/buildtools-docs/cases/build.py | |||
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/README b/meta/lib/oeqa/buildtools/cases/README index d4f20faa9f..d4f20faa9f 100644 --- a/meta/lib/oeqa/sdk/buildtools-cases/README +++ b/meta/lib/oeqa/buildtools/cases/README | |||
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/build.py b/meta/lib/oeqa/buildtools/cases/build.py index c85c32496b..c85c32496b 100644 --- a/meta/lib/oeqa/sdk/buildtools-cases/build.py +++ b/meta/lib/oeqa/buildtools/cases/build.py | |||
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py b/meta/lib/oeqa/buildtools/cases/gcc.py index a62c4d0bc4..a62c4d0bc4 100644 --- a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py +++ b/meta/lib/oeqa/buildtools/cases/gcc.py | |||
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/https.py b/meta/lib/oeqa/buildtools/cases/https.py index 4525e3d758..4525e3d758 100644 --- a/meta/lib/oeqa/sdk/buildtools-cases/https.py +++ b/meta/lib/oeqa/buildtools/cases/https.py | |||
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py b/meta/lib/oeqa/buildtools/cases/sanity.py index a55d456656..a55d456656 100644 --- a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py +++ b/meta/lib/oeqa/buildtools/cases/sanity.py | |||
diff --git a/meta/lib/oeqa/core/case.py b/meta/lib/oeqa/core/case.py index bc4446a938..ad5524a714 100644 --- a/meta/lib/oeqa/core/case.py +++ b/meta/lib/oeqa/core/case.py | |||
@@ -5,6 +5,7 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | import base64 | 7 | import base64 |
8 | import os | ||
8 | import zlib | 9 | import zlib |
9 | import unittest | 10 | import unittest |
10 | 11 | ||
@@ -57,6 +58,13 @@ class OETestCase(unittest.TestCase): | |||
57 | d.tearDownDecorator() | 58 | d.tearDownDecorator() |
58 | self.tearDownMethod() | 59 | self.tearDownMethod() |
59 | 60 | ||
61 | def assertFileExists(self, filename, msg=None): | ||
62 | """ | ||
63 | Test that filename exists. If it does not, the test will fail. | ||
64 | """ | ||
65 | if not os.path.exists(filename): | ||
66 | self.fail(msg or "%s does not exist" % filename) | ||
67 | |||
60 | class OEPTestResultTestCase: | 68 | class OEPTestResultTestCase: |
61 | """ | 69 | """ |
62 | Mix-in class to provide functions to make interacting with extraresults for | 70 | Mix-in class to provide functions to make interacting with extraresults for |
diff --git a/meta/lib/oeqa/core/decorator/data.py b/meta/lib/oeqa/core/decorator/data.py index 5444b2cb75..0daf46334f 100644 --- a/meta/lib/oeqa/core/decorator/data.py +++ b/meta/lib/oeqa/core/decorator/data.py | |||
@@ -228,3 +228,15 @@ class skipIfNotArch(OETestDecorator): | |||
228 | arch = self.case.td['HOST_ARCH'] | 228 | arch = self.case.td['HOST_ARCH'] |
229 | if arch not in self.archs: | 229 | if arch not in self.archs: |
230 | self.case.skipTest('Test skipped on %s' % arch) | 230 | self.case.skipTest('Test skipped on %s' % arch) |
231 | |||
232 | @registerDecorator | ||
233 | class skipIfNotBuildArch(OETestDecorator): | ||
234 | """ | ||
235 | Skip test if BUILD_ARCH is not present in the tuple specified. | ||
236 | """ | ||
237 | |||
238 | attrs = ('archs',) | ||
239 | def setUpDecorator(self): | ||
240 | arch = self.case.td['BUILD_ARCH'] | ||
241 | if arch not in self.archs: | ||
242 | self.case.skipTest('Test skipped on %s' % arch) | ||
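
Hypothetical usage of the new decorator, mirroring the existing skipIfNotArch:

    @skipIfNotBuildArch(['x86_64'])
    def test_host_specific_behaviour(self):
        ...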
diff --git a/meta/lib/oeqa/core/runner.py b/meta/lib/oeqa/core/runner.py index a86a706bd9..b683d9b80a 100644 --- a/meta/lib/oeqa/core/runner.py +++ b/meta/lib/oeqa/core/runner.py | |||
@@ -357,7 +357,7 @@ class OETestResultJSONHelper(object): | |||
357 | os.makedirs(write_dir, exist_ok=True) | 357 | os.makedirs(write_dir, exist_ok=True) |
358 | test_results = self._get_existing_testresults_if_available(write_dir) | 358 | test_results = self._get_existing_testresults_if_available(write_dir) |
359 | test_results[result_id] = {'configuration': configuration, 'result': test_result} | 359 | test_results[result_id] = {'configuration': configuration, 'result': test_result} |
360 | json_testresults = json.dumps(test_results, sort_keys=True, indent=4) | 360 | json_testresults = json.dumps(test_results, sort_keys=True, indent=1) |
361 | self._write_file(write_dir, self.testresult_filename, json_testresults) | 361 | self._write_file(write_dir, self.testresult_filename, json_testresults) |
362 | if has_bb: | 362 | if has_bb: |
363 | bb.utils.unlockfile(lf) | 363 | bb.utils.unlockfile(lf) |
diff --git a/meta/lib/oeqa/core/target/serial.py b/meta/lib/oeqa/core/target/serial.py new file mode 100644 index 0000000000..7c2cd8b248 --- /dev/null +++ b/meta/lib/oeqa/core/target/serial.py | |||
@@ -0,0 +1,315 @@ | |||
1 | # | ||
2 | # SPDX-License-Identifier: MIT | ||
3 | # | ||
4 | |||
5 | import base64 | ||
6 | import logging | ||
7 | import os | ||
8 | from threading import Lock | ||
9 | from . import OETarget | ||
10 | |||
11 | class OESerialTarget(OETarget): | ||
12 | |||
13 | def __init__(self, logger, target_ip, server_ip, server_port=0, | ||
14 | timeout=300, serialcontrol_cmd=None, serialcontrol_extra_args=None, | ||
15 | serialcontrol_ps1=None, serialcontrol_connect_timeout=None, | ||
16 | machine=None, **kwargs): | ||
17 | if not logger: | ||
18 | logger = logging.getLogger('target') | ||
19 | logger.setLevel(logging.INFO) | ||
20 | filePath = os.path.join(os.getcwd(), 'remoteTarget.log') | ||
21 | fileHandler = logging.FileHandler(filePath, 'w', 'utf-8') | ||
22 | formatter = logging.Formatter( | ||
23 | '%(asctime)s.%(msecs)03d %(levelname)s: %(message)s', | ||
24 | '%H:%M:%S') | ||
25 | fileHandler.setFormatter(formatter) | ||
26 | logger.addHandler(fileHandler) | ||
27 | |||
28 | super(OESerialTarget, self).__init__(logger) | ||
29 | |||
30 | if serialcontrol_ps1: | ||
31 | self.target_ps1 = serialcontrol_ps1 | ||
32 | elif machine: | ||
33 | # fallback to a default value which assumes root@machine | ||
34 | self.target_ps1 = f'root@{machine}:.*# ' | ||
35 | else: | ||
36 | raise ValueError("Unable to determine shell command prompt (PS1) format.") | ||
37 | |||
38 | if not serialcontrol_cmd: | ||
39 | raise ValueError("Unable to determine serial control command.") | ||
40 | |||
41 | if serialcontrol_extra_args: | ||
42 | self.connection_script = f'{serialcontrol_cmd} {serialcontrol_extra_args}' | ||
43 | else: | ||
44 | self.connection_script = serialcontrol_cmd | ||
45 | |||
46 | if serialcontrol_connect_timeout: | ||
47 | self.connect_timeout = serialcontrol_connect_timeout | ||
48 | else: | ||
49 | self.connect_timeout = 10 # default to 10s connection timeout | ||
50 | |||
51 | self.default_command_timeout = timeout | ||
52 | self.ip = target_ip | ||
53 | self.server_ip = server_ip | ||
54 | self.server_port = server_port | ||
55 | self.conn = None | ||
56 | self.mutex = Lock() | ||
57 | |||
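A hedged construction sketch; the control command and device path below are illustrative assumptions, not values required by the class (any command that attaches stdin/stdout to the DUT's serial console will do):

    import logging

    # 'picocom' and /dev/ttyUSB0 are placeholders for a real serial setup.
    target = OESerialTarget(logging.getLogger('target'),
                            target_ip='192.168.7.2',
                            server_ip='192.168.7.1',
                            serialcontrol_cmd='picocom -b 115200 /dev/ttyUSB0',
                            machine='qemux86-64')
    status, output = target.run('uname -r', timeout=60)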
58 | def start(self, **kwargs): | ||
59 | pass | ||
60 | |||
61 | def stop(self, **kwargs): | ||
62 | pass | ||
63 | |||
64 | def get_connection(self): | ||
65 | if self.conn is None: | ||
66 | self.conn = SerialConnection(self.connection_script, | ||
67 | self.target_ps1, | ||
68 | self.connect_timeout, | ||
69 | self.default_command_timeout) | ||
70 | |||
71 | return self.conn | ||
72 | |||
73 | def run(self, cmd, timeout=None): | ||
74 | """ | ||
75 | Runs command on target over the provided serial connection. | ||
76 | The first call will open the connection, and subsequent | ||
77 | calls will re-use the same connection to send new commands. | ||
78 | |||
79 | command: Command to run on target. | ||
80 | timeout: <value>: Kill command after <val> seconds. | ||
81 | None: Kill command default value seconds. | ||
82 | 0: No timeout, runs until return. | ||
83 | """ | ||
84 | # Lock needed to avoid multiple threads running commands concurrently | ||
85 | # A serial connection can only be used by one caller at a time | ||
86 | with self.mutex: | ||
87 | conn = self.get_connection() | ||
88 | |||
89 | self.logger.debug(f"[Running]$ {cmd}") | ||
90 | # Run the command, then echo $? to get the command's return code | ||
91 | try: | ||
92 | output = conn.run_command(cmd, timeout) | ||
93 | status = conn.run_command("echo $?") | ||
94 | self.logger.debug(f" [stdout]: {output}") | ||
95 | self.logger.debug(f" [ret code]: {status}\n\n") | ||
96 | except SerialTimeoutException as e: | ||
97 | self.logger.debug(e) | ||
98 | output = "" | ||
99 | status = 255 | ||
100 | |||
101 | # Return to $HOME after each command to simulate a stateless SSH connection | ||
102 | conn.run_command('cd "$HOME"') | ||
103 | |||
104 | return (int(status), output) | ||
105 | |||
106 | def copyTo(self, localSrc, remoteDst): | ||
107 | """ | ||
108 | Copies files by converting them to base 32, then transferring | ||
109 | the ASCII text to the target, and decoding it in place on the | ||
110 | target. | ||
111 | |||
112 | On a 115k baud serial connection, this method transfers at | ||
113 | roughly 30kbps. | ||
114 | """ | ||
115 | with open(localSrc, 'rb') as file: | ||
116 | data = file.read() | ||
117 | |||
118 | b32 = base64.b32encode(data).decode('utf-8') | ||
119 | |||
120 | # To avoid shell line limits, send a chunk at a time | ||
121 | SPLIT_LEN = 512 | ||
122 | lines = [b32[i:i+SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)] | ||
123 | |||
124 | with self.mutex: | ||
125 | conn = self.get_connection() | ||
126 | |||
127 | filename = os.path.basename(localSrc) | ||
128 | TEMP = f'/tmp/{filename}.b32' | ||
129 | |||
130 | # Create or empty out the temp file | ||
131 | conn.run_command(f'echo -n "" > {TEMP}') | ||
132 | |||
133 | for line in lines: | ||
134 | conn.run_command(f'echo -n {line} >> {TEMP}') | ||
135 | |||
136 | # Check to see whether the remoteDst is a directory | ||
137 | is_directory = conn.run_command(f'[[ -d {remoteDst} ]]; echo $?') | ||
138 | if int(is_directory) == 0: | ||
139 | # append the localSrc filename to the end of remoteDst | ||
140 | remoteDst = os.path.join(remoteDst, filename) | ||
141 | |||
142 | conn.run_command(f'base32 -d {TEMP} > {remoteDst}') | ||
143 | conn.run_command(f'rm {TEMP}') | ||
144 | |||
145 | return 0, 'Success' | ||
146 | |||
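The host side of this transfer scheme can be exercised without a target; a minimal round-trip sketch of the chunking used above:

    import base64

    payload = bytes(range(256))                  # arbitrary binary data
    b32 = base64.b32encode(payload).decode('utf-8')

    SPLIT_LEN = 512                              # same chunk size as copyTo
    chunks = [b32[i:i+SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)]

    # On the target the chunks are appended to a file and decoded with
    # 'base32 -d'; the local equivalent of that reassembly is:
    assert base64.b32decode(''.join(chunks)) == payload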
147 | def copyFrom(self, remoteSrc, localDst): | ||
148 | """ | ||
149 | Copies files by converting them to base 32 on the target, then | ||
150 | transferring the ASCII text to the host. That text is then | ||
151 | decoded here and written out to the destination. | ||
152 | |||
153 | On a 115k baud serial connection, this method transfers at | ||
154 | roughly 30kbps. | ||
155 | """ | ||
156 | with self.mutex: | ||
157 | b32 = self.get_connection().run_command(f'base32 {remoteSrc}') | ||
158 | |||
159 | data = base64.b32decode(b32.replace('\r\n', '')) | ||
160 | |||
161 | # If the local path is a directory, get the filename from | ||
162 | # the remoteSrc path and append it to localDst | ||
163 | if os.path.isdir(localDst): | ||
164 | filename = os.path.basename(remoteSrc) | ||
165 | localDst = os.path.join(localDst, filename) | ||
166 | |||
167 | with open(localDst, 'wb') as file: | ||
168 | file.write(data) | ||
169 | |||
170 | return 0, 'Success' | ||
171 | |||
172 | def copyDirTo(self, localSrc, remoteDst): | ||
173 | """ | ||
174 | Recursively copy the localSrc directory to remoteDst on the target. | ||
175 | """ | ||
176 | |||
177 | for root, dirs, files in os.walk(localSrc): | ||
178 | # Create directories in the target as needed | ||
179 | for d in dirs: | ||
180 | tmpDir = os.path.join(root, d).replace(localSrc, "") | ||
181 | newDir = os.path.join(remoteDst, tmpDir.lstrip("/")) | ||
182 | cmd = "mkdir -p %s" % newDir | ||
183 | self.run(cmd) | ||
184 | |||
185 | # Copy files into the target | ||
186 | for f in files: | ||
187 | tmpFile = os.path.join(root, f).replace(localSrc, "") | ||
188 | dstFile = os.path.join(remoteDst, tmpFile.lstrip("/")) | ||
189 | srcFile = os.path.join(root, f) | ||
190 | self.copyTo(srcFile, dstFile) | ||
191 | |||
192 | def deleteFiles(self, remotePath, files): | ||
193 | """ | ||
194 | Deletes files in target's remotePath. | ||
195 | """ | ||
196 | |||
197 | cmd = "rm" | ||
198 | if not isinstance(files, list): | ||
199 | files = [files] | ||
200 | |||
201 | for f in files: | ||
202 | cmd = "%s %s" % (cmd, os.path.join(remotePath, f)) | ||
203 | |||
204 | self.run(cmd) | ||
205 | |||
206 | def deleteDir(self, remotePath): | ||
207 | """ | ||
208 | Deletes target's remotePath directory. | ||
209 | """ | ||
210 | |||
211 | cmd = "rmdir %s" % remotePath | ||
212 | self.run(cmd) | ||
213 | |||
214 | def deleteDirStructure(self, localPath, remotePath): | ||
215 | """ | ||
216 | Recursively delete the localPath directory structure from the target's remotePath. | ||
217 | |||
218 | This function is useful for deleting a package that is installed on the | ||
219 | device under test (DUT) when the host running the test has the same | ||
220 | package extracted in a tmp directory. | ||
221 | |||
222 | Example: | ||
223 | pwd: /home/user/tmp | ||
224 | tree: . | ||
225 | └── work | ||
226 | ├── dir1 | ||
227 | │ └── file1 | ||
228 | └── dir2 | ||
229 | |||
230 | localpath = "/home/user/tmp" and remotepath = "/home/user" | ||
231 | |||
232 | With the above variables this function will try to delete the | ||
233 | directory in the DUT in this order: | ||
234 | /home/user/work/dir1/file1 | ||
235 | /home/user/work/dir1 (if dir is empty) | ||
236 | /home/user/work/dir2 (if dir is empty) | ||
237 | /home/user/work (if dir is empty) | ||
238 | """ | ||
239 | |||
240 | for root, dirs, files in os.walk(localPath, topdown=False): | ||
241 | # Delete files first | ||
242 | tmpDir = os.path.join(root).replace(localPath, "") | ||
243 | remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) | ||
244 | self.deleteFiles(remoteDir, files) | ||
245 | |||
246 | # Remove dirs if empty | ||
247 | for d in dirs: | ||
248 | tmpDir = os.path.join(root, d).replace(localPath, "") | ||
249 | remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) | ||
250 | self.deleteDir(remoteDir) | ||
251 | |||
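The deletion order described in the docstring above falls directly out of the bottom-up walk; a self-contained sketch reproducing it on the host:

    import os, tempfile

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, 'work/dir1'))
    os.makedirs(os.path.join(root, 'work/dir2'))
    open(os.path.join(root, 'work/dir1/file1'), 'w').close()

    # topdown=False yields the deepest entries first, so files and
    # leaf directories are visited before their parents.
    for r, dirs, files in os.walk(root, topdown=False):
        print(os.path.relpath(r, root), dirs, files)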
252 | class SerialTimeoutException(Exception): | ||
253 | def __init__(self, msg): | ||
254 | self.msg = msg | ||
255 | def __str__(self): | ||
256 | return self.msg | ||
257 | |||
258 | class SerialConnection: | ||
259 | |||
260 | def __init__(self, script, target_prompt, connect_timeout, default_command_timeout): | ||
261 | import pexpect # limiting scope to avoid build dependency | ||
262 | self.prompt = target_prompt | ||
263 | self.connect_timeout = connect_timeout | ||
264 | self.default_command_timeout = default_command_timeout | ||
265 | self.conn = pexpect.spawn('/bin/bash', ['-c', script], encoding='utf8') | ||
266 | self._seek_to_clean_shell() | ||
267 | # Disable echo to avoid the need to parse the outgoing command | ||
268 | self.run_command('stty -echo') | ||
269 | |||
270 | def _seek_to_clean_shell(self): | ||
271 | """ | ||
272 | Attempts to find a clean shell, meaning it is clear and | ||
273 | ready to accept a new command. This is necessary to ensure | ||
274 | the correct output is captured from each command. | ||
275 | """ | ||
276 | import pexpect # limiting scope to avoid build dependency | ||
277 | # Look for a clean shell | ||
278 | # Wait a short amount of time for the connection to finish | ||
279 | pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT], | ||
280 | timeout=self.connect_timeout) | ||
281 | |||
282 | # if a timeout occurred, send an empty line and wait for a clean shell | ||
283 | if pexpect_code == 1: | ||
284 | # send a newline to clear and present the shell | ||
285 | self.conn.sendline("") | ||
286 | pexpect_code = self.conn.expect(self.prompt) | ||
287 | |||
288 | def run_command(self, cmd, timeout=None): | ||
289 | """ | ||
290 | Runs command on target over the provided serial connection. | ||
291 | Returns any output on the shell while the command was run. | ||
292 | |||
293 | cmd: Command to run on target. | ||
294 | timeout: <value>: Kill command after <value> seconds. | ||
295 | None: Kill command after the default timeout, in seconds. | ||
296 | 0: No timeout, runs until return. | ||
297 | """ | ||
298 | import pexpect # limiting scope to avoid build dependency | ||
299 | # Convert from the OETarget defaults to pexpect timeout values | ||
300 | if timeout is None: | ||
301 | timeout = self.default_command_timeout | ||
302 | elif timeout == 0: | ||
303 | timeout = None # passing None to pexpect is infinite timeout | ||
304 | |||
305 | self.conn.sendline(cmd) | ||
306 | pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT], timeout=timeout) | ||
307 | |||
308 | # check for timeout | ||
309 | if pexpect_code == 1: | ||
310 | self.conn.send('\003') # send Ctrl+C | ||
311 | self._seek_to_clean_shell() | ||
312 | raise SerialTimeoutException(f'Timeout executing: {cmd} after {timeout}s') | ||
313 | |||
314 | return self.conn.before.removesuffix('\r\n') | ||
315 | |||
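pexpect ships a helper that performs the same prompt-synchronisation dance; a standalone sketch against a local bash (no serial hardware needed) showing the expect/sendline loop SerialConnection implements by hand around the target's PS1:

    import pexpect.replwrap

    bash = pexpect.replwrap.bash()            # spawns bash behind a known prompt
    print(bash.run_command('echo hello'))     # output captured between prompts
    print(bash.run_command('echo $?'))        # the status read used by run()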
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py index 09cdd14c75..8b5c450a05 100644 --- a/meta/lib/oeqa/core/target/ssh.py +++ b/meta/lib/oeqa/core/target/ssh.py | |||
@@ -55,14 +55,14 @@ class OESSHTarget(OETarget): | |||
55 | def stop(self, **kwargs): | 55 | def stop(self, **kwargs): |
56 | pass | 56 | pass |
57 | 57 | ||
58 | def _run(self, command, timeout=None, ignore_status=True): | 58 | def _run(self, command, timeout=None, ignore_status=True, raw=False): |
59 | """ | 59 | """ |
60 | Runs command in target using SSHProcess. | 60 | Runs command in target using SSHProcess. |
61 | """ | 61 | """ |
62 | self.logger.debug("[Running]$ %s" % " ".join(command)) | 62 | self.logger.debug("[Running]$ %s" % " ".join(command)) |
63 | 63 | ||
64 | starttime = time.time() | 64 | starttime = time.time() |
65 | status, output = SSHCall(command, self.logger, timeout) | 65 | status, output = SSHCall(command, self.logger, timeout, raw) |
66 | self.logger.debug("[Command returned '%d' after %.2f seconds]" | 66 | self.logger.debug("[Command returned '%d' after %.2f seconds]" |
67 | "" % (status, time.time() - starttime)) | 67 | "" % (status, time.time() - starttime)) |
68 | 68 | ||
@@ -72,7 +72,7 @@ class OESSHTarget(OETarget): | |||
72 | 72 | ||
73 | return (status, output) | 73 | return (status, output) |
74 | 74 | ||
75 | def run(self, command, timeout=None, ignore_status=True): | 75 | def run(self, command, timeout=None, ignore_status=True, raw=False): |
76 | """ | 76 | """ |
77 | Runs command in target. | 77 | Runs command in target. |
78 | 78 | ||
@@ -91,8 +91,11 @@ class OESSHTarget(OETarget): | |||
91 | else: | 91 | else: |
92 | processTimeout = self.timeout | 92 | processTimeout = self.timeout |
93 | 93 | ||
94 | status, output = self._run(sshCmd, processTimeout, ignore_status) | 94 | status, output = self._run(sshCmd, processTimeout, ignore_status, raw) |
95 | self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output)) | 95 | if len(output) > (64 * 1024): |
96 | self.logger.debug('Command: %s\nStatus: %d Output length: %s\n' % (command, status, len(output))) | ||
97 | else: | ||
98 | self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output)) | ||
96 | 99 | ||
97 | return (status, output) | 100 | return (status, output) |
98 | 101 | ||
@@ -206,22 +209,23 @@ class OESSHTarget(OETarget): | |||
206 | remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) | 209 | remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) |
207 | self.deleteDir(remoteDir) | 210 | self.deleteDir(remoteDir) |
208 | 211 | ||
209 | def SSHCall(command, logger, timeout=None, **opts): | 212 | def SSHCall(command, logger, timeout=None, raw=False, **opts): |
210 | 213 | ||
211 | def run(): | 214 | def run(): |
212 | nonlocal output | 215 | nonlocal output |
213 | nonlocal process | 216 | nonlocal process |
214 | output_raw = b'' | 217 | output_raw = bytearray() |
215 | starttime = time.time() | 218 | starttime = time.time() |
219 | progress = time.time() | ||
216 | process = subprocess.Popen(command, **options) | 220 | process = subprocess.Popen(command, **options) |
217 | has_timeout = False | 221 | has_timeout = False |
222 | appendline = None | ||
218 | if timeout: | 223 | if timeout: |
219 | endtime = starttime + timeout | 224 | endtime = starttime + timeout |
220 | eof = False | 225 | eof = False |
221 | os.set_blocking(process.stdout.fileno(), False) | 226 | os.set_blocking(process.stdout.fileno(), False) |
222 | while not has_timeout and not eof: | 227 | while not has_timeout and not eof: |
223 | try: | 228 | try: |
224 | logger.debug('Waiting for process output: time: %s, endtime: %s' % (time.time(), endtime)) | ||
225 | if select.select([process.stdout], [], [], 5)[0] != []: | 229 | if select.select([process.stdout], [], [], 5)[0] != []: |
226 | # wait a bit for more data, tries to avoid reading single characters | 230 | # wait a bit for more data, tries to avoid reading single characters |
227 | time.sleep(0.2) | 231 | time.sleep(0.2) |
@@ -229,9 +233,9 @@ def SSHCall(command, logger, timeout=None, **opts): | |||
229 | if not data: | 233 | if not data: |
230 | eof = True | 234 | eof = True |
231 | else: | 235 | else: |
232 | output_raw += data | 236 | output_raw.extend(data) |
233 | # ignore errors to capture as much as possible | 237 | # ignore errors to capture as much as possible |
234 | logger.debug('Partial data from SSH call:\n%s' % data.decode('utf-8', errors='ignore')) | 238 | #logger.debug('Partial data from SSH call:\n%s' % data.decode('utf-8', errors='ignore')) |
235 | endtime = time.time() + timeout | 239 | endtime = time.time() + timeout |
236 | except InterruptedError: | 240 | except InterruptedError: |
237 | logger.debug('InterruptedError') | 241 | logger.debug('InterruptedError') |
@@ -244,6 +248,10 @@ def SSHCall(command, logger, timeout=None, **opts): | |||
244 | logger.debug('SSHCall has timeout! Time: %s, endtime: %s' % (time.time(), endtime)) | 248 | logger.debug('SSHCall has timeout! Time: %s, endtime: %s' % (time.time(), endtime)) |
245 | has_timeout = True | 249 | has_timeout = True |
246 | 250 | ||
251 | if time.time() >= (progress + 60): | ||
252 | logger.debug('Waiting for process output at time: %s with datasize: %s' % (time.time(), len(output_raw))) | ||
253 | progress = time.time() | ||
254 | |||
247 | process.stdout.close() | 255 | process.stdout.close() |
248 | 256 | ||
249 | # process hasn't returned yet | 257 | # process hasn't returned yet |
@@ -256,17 +264,29 @@ def SSHCall(command, logger, timeout=None, **opts): | |||
256 | logger.debug('OSError when killing process') | 264 | logger.debug('OSError when killing process') |
257 | pass | 265 | pass |
258 | endtime = time.time() - starttime | 266 | endtime = time.time() - starttime |
259 | lastline = ("\nProcess killed - no output for %d seconds. Total" | 267 | appendline = ("\nProcess killed - no output for %d seconds. Total" |
260 | " running time: %d seconds." % (timeout, endtime)) | 268 | " running time: %d seconds." % (timeout, endtime)) |
261 | logger.debug('Received data from SSH call:\n%s ' % lastline) | 269 | logger.debug('Received data from SSH call:\n%s ' % appendline) |
262 | output += lastline | ||
263 | process.wait() | 270 | process.wait() |
264 | 271 | ||
272 | if raw: | ||
273 | output = bytes(output_raw) | ||
274 | if appendline: | ||
275 | output += bytes(appendline, "utf-8") | ||
276 | else: | ||
277 | output = output_raw.decode('utf-8', errors='ignore') | ||
278 | if appendline: | ||
279 | output += appendline | ||
265 | else: | 280 | else: |
266 | output_raw = process.communicate()[0] | 281 | output = output_raw = process.communicate()[0] |
282 | if not raw: | ||
283 | output = output_raw.decode('utf-8', errors='ignore') | ||
267 | 284 | ||
268 | output = output_raw.decode('utf-8', errors='ignore') | 285 | if len(output) < (64 * 1024): |
269 | logger.debug('Data from SSH call:\n%s' % output.rstrip()) | 286 | if output.rstrip(): |
287 | logger.debug('Data from SSH call:\n%s' % output.rstrip()) | ||
288 | else: | ||
289 | logger.debug('No output from SSH call') | ||
270 | 290 | ||
271 | # timeout or not, make sure process exits and is not hanging | 291 | # timeout or not, make sure process exits and is not hanging |
272 | if process.returncode == None: | 292 | if process.returncode == None: |
@@ -292,7 +312,7 @@ def SSHCall(command, logger, timeout=None, **opts): | |||
292 | 312 | ||
293 | options = { | 313 | options = { |
294 | "stdout": subprocess.PIPE, | 314 | "stdout": subprocess.PIPE, |
295 | "stderr": subprocess.STDOUT, | 315 | "stderr": subprocess.STDOUT if not raw else None, |
296 | "stdin": None, | 316 | "stdin": None, |
297 | "shell": False, | 317 | "shell": False, |
298 | "bufsize": -1, | 318 | "bufsize": -1, |
@@ -320,4 +340,4 @@ def SSHCall(command, logger, timeout=None, **opts): | |||
320 | logger.debug('Something went wrong, killing SSH process') | 340 | logger.debug('Something went wrong, killing SSH process') |
321 | raise | 341 | raise |
322 | 342 | ||
323 | return (process.returncode, output.rstrip()) | 343 | return (process.returncode, output if raw else output.rstrip()) |
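With the new raw mode, binary data can round-trip through run() without being mangled by UTF-8 decoding or rstrip(); a hedged usage sketch (assumes an existing OESSHTarget instance named target, and the example paths are illustrative):

    # Default text mode: decoded, stripped string, stderr merged into stdout.
    status, text = target.run('cat /etc/os-release')

    # Raw mode: bytes out, stderr not captured, nothing stripped, so
    # checksums of binary payloads survive intact.
    status, blob = target.run('cat /boot/bzImage', raw=True)
    with open('bzImage.copy', 'wb') as f:
        f.write(blob)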
diff --git a/meta/lib/oeqa/core/tests/common.py b/meta/lib/oeqa/core/tests/common.py index 88cc758ad3..bcc4fde632 100644 --- a/meta/lib/oeqa/core/tests/common.py +++ b/meta/lib/oeqa/core/tests/common.py | |||
@@ -9,7 +9,6 @@ import os | |||
9 | 9 | ||
10 | import unittest | 10 | import unittest |
11 | import logging | 11 | import logging |
12 | import os | ||
13 | 12 | ||
14 | logger = logging.getLogger("oeqa") | 13 | logger = logging.getLogger("oeqa") |
15 | logger.setLevel(logging.INFO) | 14 | logger.setLevel(logging.INFO) |
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml index de95025e86..a78ada2593 100644 --- a/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml +++ b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml | |||
@@ -14,7 +14,7 @@ crate-type = ["cdylib"] | |||
14 | rand = "0.8.4" | 14 | rand = "0.8.4" |
15 | 15 | ||
16 | [dependencies.pyo3] | 16 | [dependencies.pyo3] |
17 | version = "0.19.0" | 17 | version = "0.24.1" |
18 | # "abi3-py38" tells pyo3 (and maturin) to build using the stable ABI with minimum Python version 3.8 | 18 | # "abi3-py38" tells pyo3 (and maturin) to build using the stable ABI with minimum Python version 3.8 |
19 | features = ["abi3-py38"] | 19 | features = ["abi3-py38"] |
20 | 20 | ||
diff --git a/meta/lib/oeqa/manual/crops.json b/meta/lib/oeqa/manual/crops.json deleted file mode 100644 index 5cfa653843..0000000000 --- a/meta/lib/oeqa/manual/crops.json +++ /dev/null | |||
@@ -1,294 +0,0 @@ | |||
1 | [ | ||
2 | { | ||
3 | "test": { | ||
4 | "@alias": "crops-default.crops-default.sdkext_eSDK_devtool_build_make", | ||
5 | "author": [ | ||
6 | { | ||
7 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
8 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
9 | } | ||
10 | ], | ||
11 | "execution": { | ||
12 | "1": { | ||
13 | "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
14 | "expected_results": "" | ||
15 | }, | ||
16 | "2": { | ||
17 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
18 | "expected_results": "" | ||
19 | }, | ||
20 | "3": { | ||
21 | "action": "Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n\n", | ||
22 | "expected_results": "" | ||
23 | }, | ||
24 | "4": { | ||
25 | "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n", | ||
26 | "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n" | ||
27 | }, | ||
28 | "5": { | ||
29 | "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n", | ||
30 | "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces." | ||
31 | }, | ||
32 | "6": { | ||
33 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
34 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
35 | }, | ||
36 | "7": { | ||
37 | "action": " run command which devtool \n\n", | ||
38 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n " | ||
39 | }, | ||
40 | "8": { | ||
41 | "action": "devtool add myapp <directory>(this is myapp dir) \n\n\n", | ||
42 | "expected_results": "The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb" | ||
43 | }, | ||
44 | "9": { | ||
45 | "action": " devtool build myapp \n\n", | ||
46 | "expected_results": "This should compile an image" | ||
47 | }, | ||
48 | "10": { | ||
49 | "action": " devtool reset myapp ", | ||
50 | "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase." | ||
51 | } | ||
52 | }, | ||
53 | "summary": "sdkext_eSDK_devtool_build_make" | ||
54 | } | ||
55 | }, | ||
56 | { | ||
57 | "test": { | ||
58 | "@alias": "crops-default.crops-default.sdkext_devtool_build_esdk_package", | ||
59 | "author": [ | ||
60 | { | ||
61 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
62 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
63 | } | ||
64 | ], | ||
65 | "execution": { | ||
66 | "1": { | ||
67 | "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
68 | "expected_results": "" | ||
69 | }, | ||
70 | "2": { | ||
71 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
72 | "expected_results": "" | ||
73 | }, | ||
74 | "3": { | ||
75 | "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp/ \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n", | ||
76 | "expected_results": "" | ||
77 | }, | ||
78 | "4": { | ||
79 | "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n", | ||
80 | "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n" | ||
81 | }, | ||
82 | "5": { | ||
83 | "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include<stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n", | ||
84 | "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n" | ||
85 | }, | ||
86 | "6": { | ||
87 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
88 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
89 | }, | ||
90 | "7": { | ||
91 | "action": " run command which devtool \n\n", | ||
92 | "expected_results": " this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
93 | }, | ||
94 | "8": { | ||
95 | "action": " devtool add myapp <directory> (this is myapp dir) \n\n", | ||
96 | "expected_results": " The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n" | ||
97 | }, | ||
98 | "9": { | ||
99 | "action": " devtool package myapp \n\n", | ||
100 | "expected_results": " you should expect a package creation of myapp and it should be under the /tmp/deploy/ \n\n" | ||
101 | }, | ||
102 | "10": { | ||
103 | "action": " devtool reset myapp ", | ||
104 | "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase.\n</package_format>" | ||
105 | } | ||
106 | }, | ||
107 | "summary": "sdkext_devtool_build_esdk_package" | ||
108 | } | ||
109 | }, | ||
110 | { | ||
111 | "test": { | ||
112 | "@alias": "crops-default.crops-default.sdkext_devtool_build_cmake", | ||
113 | "author": [ | ||
114 | { | ||
115 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
116 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
117 | } | ||
118 | ], | ||
119 | "execution": { | ||
120 | "1": { | ||
121 | "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
122 | "expected_results": "" | ||
123 | }, | ||
124 | "2": { | ||
125 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
126 | "expected_results": "" | ||
127 | }, | ||
128 | "3": { | ||
129 | "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n", | ||
130 | "expected_results": "" | ||
131 | }, | ||
132 | "4": { | ||
133 | "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n", | ||
134 | "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n" | ||
135 | }, | ||
136 | "5": { | ||
137 | "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n", | ||
138 | "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n" | ||
139 | }, | ||
140 | "6": { | ||
141 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
142 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
143 | }, | ||
144 | "7": { | ||
145 | "action": " run command which devtool \n\n", | ||
146 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
147 | }, | ||
148 | "8": { | ||
149 | "action": " devtool add myapp <directory> (this is myapp_cmake dir) \n\n", | ||
150 | "expected_results": "The directory you should input is the myapp_cmake directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n" | ||
151 | }, | ||
152 | "9": { | ||
153 | "action": " devtool build myapp \n\n", | ||
154 | "expected_results": "This should compile an image \n\n" | ||
155 | }, | ||
156 | "10": { | ||
157 | "action": " devtool reset myapp ", | ||
158 | "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase. " | ||
159 | } | ||
160 | }, | ||
161 | "summary": "sdkext_devtool_build_cmake" | ||
162 | } | ||
163 | }, | ||
164 | { | ||
165 | "test": { | ||
166 | "@alias": "crops-default.crops-default.sdkext_extend_autotools_recipe_creation", | ||
167 | "author": [ | ||
168 | { | ||
169 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
170 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
171 | } | ||
172 | ], | ||
173 | "execution": { | ||
174 | "1": { | ||
175 | "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
176 | "expected_results": "" | ||
177 | }, | ||
178 | "2": { | ||
179 | "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
180 | "expected_results": "" | ||
181 | }, | ||
182 | "3": { | ||
183 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
184 | "expected_results": " This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
185 | }, | ||
186 | "4": { | ||
187 | "action": "run command which devtool \n\n", | ||
188 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
189 | }, | ||
190 | "5": { | ||
191 | "action": "devtool sdk-install -s libxml2 \n\n", | ||
192 | "expected_results": "this should install libxml2 \n\n" | ||
193 | }, | ||
194 | "6": { | ||
195 | "action": "devtool add librdfa https://github.com/rdfa/librdfa \n\n", | ||
196 | "expected_results": "This should automatically create the recipe librdfa.bb under /recipes/librdfa/librdfa.bb \n\n" | ||
197 | }, | ||
198 | "7": { | ||
199 | "action": "devtool build librdfa \n\n", | ||
200 | "expected_results": "This should compile \n\n" | ||
201 | }, | ||
202 | "8": { | ||
203 | "action": "devtool reset librdfa ", | ||
204 | "expected_results": "This cleans sysroot of the librdfa recipe, but it leaves the source tree intact. meaning it does not erase." | ||
205 | } | ||
206 | }, | ||
207 | "summary": "sdkext_extend_autotools_recipe_creation" | ||
208 | } | ||
209 | }, | ||
210 | { | ||
211 | "test": { | ||
212 | "@alias": "crops-default.crops-default.sdkext_devtool_kernelmodule", | ||
213 | "author": [ | ||
214 | { | ||
215 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
216 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
217 | } | ||
218 | ], | ||
219 | "execution": { | ||
220 | "1": { | ||
221 | "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n", | ||
222 | "expected_results": "" | ||
223 | }, | ||
224 | "2": { | ||
225 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
226 | "expected_results": "" | ||
227 | }, | ||
228 | "3": { | ||
229 | "action": "source environment-setup-i586-poky-linux \n\n", | ||
230 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n \n" | ||
231 | }, | ||
232 | "4": { | ||
233 | "action": "run command which devtool \n\n", | ||
234 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
235 | }, | ||
236 | "5": { | ||
237 | "action": "devtool add kernel-module-hello-world https://git.yoctoproject.org/git/kernel-module-hello-world \n\n", | ||
238 | "expected_results": "This should automatically create the recipe kernel-module-hello-world.bb under <crops-esdk-workdir-workspace>/recipes/kernel-module-hello-world/kernel-module-hello-world.bb " | ||
239 | }, | ||
240 | "6": { | ||
241 | "action": "devtool build kernel-module-hello-world \n\n", | ||
242 | "expected_results": "This should compile an image \n\n" | ||
243 | }, | ||
244 | "7": { | ||
245 | "action": "devtool reset kernel-module-hello-world ", | ||
246 | "expected_results": "This cleans sysroot of the kernel-module-hello-world recipe, but it leaves the source tree intact. meaning it does not erase." | ||
247 | } | ||
248 | }, | ||
249 | "summary": "sdkext_devtool_kernelmodule" | ||
250 | } | ||
251 | }, | ||
252 | { | ||
253 | "test": { | ||
254 | "@alias": "crops-default.crops-default.sdkext_recipes_for_nodejs", | ||
255 | "author": [ | ||
256 | { | ||
257 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
258 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
259 | } | ||
260 | ], | ||
261 | "execution": { | ||
262 | "1": { | ||
263 | "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\nlets say variable npm = npm://registry.npmjs.org;name=winston;version=2.2.0 \n\n", | ||
264 | "expected_results": "" | ||
265 | }, | ||
266 | "2": { | ||
267 | "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
268 | "expected_results": "" | ||
269 | }, | ||
270 | "3": { | ||
271 | "action": "source environment-setup-i586-poky-linux \n\n", | ||
272 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
273 | }, | ||
274 | "4": { | ||
275 | "action": "run command which devtool \n\n", | ||
276 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
277 | }, | ||
278 | "5": { | ||
279 | "action": " 4a) git clone git://git.openembedded.org/meta-openembedded in layers/build directory \n \n4b) Add meta-openembedded/meta-oe in bblayer.conf as mentioned below: ${SDKBASEMETAPATH}/layers/build/meta-openembedded/meta-oe \\ \n\n4c) devtool add \"npm://registry.npmjs.org;name=npm;version=2.2.0\" \n\n", | ||
280 | "expected_results": " This should automatically create the recipe npm.bb under /recipes/npm/npm.bb \n\n" | ||
281 | }, | ||
282 | "6": { | ||
283 | "action": "devtool build npm \n\n", | ||
284 | "expected_results": "This should compile an image \n\n" | ||
285 | }, | ||
286 | "7": { | ||
287 | "action": " devtool reset npm", | ||
288 | "expected_results": "This cleans sysroot of the npm recipe, but it leaves the source tree intact. meaning it does not erase." | ||
289 | } | ||
290 | }, | ||
291 | "summary": "sdkext_recipes_for_nodejs" | ||
292 | } | ||
293 | } | ||
294 | ] | ||
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json deleted file mode 100644 index 6c110d0656..0000000000 --- a/meta/lib/oeqa/manual/eclipse-plugin.json +++ /dev/null | |||
@@ -1,322 +0,0 @@ | |||
1 | [ | ||
2 | { | ||
3 | "test": { | ||
4 | "@alias": "eclipse-plugin.eclipse-plugin.support_SSH_connection_to_Target", | ||
5 | "author": [ | ||
6 | { | ||
7 | "email": "ee.peng.yeoh@intel.com", | ||
8 | "name": "ee.peng.yeoh@intel.com" | ||
9 | } | ||
10 | ], | ||
11 | "execution": { | ||
12 | "1": { | ||
13 | "action": "In Eclipse, swich to Remote System Explorer to create a connention baseed on SSH, input the remote target IP address as the Host name, make sure disable the proxy in Window->Preferences->General->Network Connection, set Direct as Active Provider field. ", | ||
14 | "expected_results": "the connection based on SSH could be set up." | ||
15 | }, | ||
16 | "2": { | ||
17 | "action": "Configure connection from Eclipse: Run->Run Configurations->C/C++ Remote Application\\ ->New Connection->General->SSH Only ", | ||
18 | "expected_results": "" | ||
19 | }, | ||
20 | "3": { | ||
21 | "action": "Then right click to connect, input the user ID and password. ", | ||
22 | "expected_results": "" | ||
23 | }, | ||
24 | "4": { | ||
25 | "action": "expand the connection, it will show the Sftp Files etc. \nNOTE. Might need to change dropbear to openssh and add the packagegroup-core-eclipse-debug recipe", | ||
26 | "expected_results": "" | ||
27 | } | ||
28 | }, | ||
29 | "summary": "support_SSH_connection_to_Target" | ||
30 | } | ||
31 | }, | ||
32 | { | ||
33 | "test": { | ||
34 | "@alias": "eclipse-plugin.eclipse-plugin.Launch_QEMU_from_Eclipse", | ||
35 | "author": [ | ||
36 | { | ||
37 | "email": "ee.peng.yeoh@intel.com", | ||
38 | "name": "ee.peng.yeoh@intel.com" | ||
39 | } | ||
40 | ], | ||
41 | "execution": { | ||
42 | "1": { | ||
43 | "action": "Set the Yocto ADT's toolchain root location, sysroot location and kernel, in the menu Window -> Preferences -> Yocto ADT. \n \n", | ||
44 | "expected_results": "" | ||
45 | }, | ||
46 | "2": { | ||
47 | "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n", | ||
48 | "expected_results": " Qemu can be lauched normally." | ||
49 | }, | ||
50 | "3": { | ||
51 | "action": "(a)Point to the Toolchain: \n \nIf you are using a stand-alone pre-built toolchain, you should be pointing to the /opt/poky/{test-version} directory as Toolchain Root Location. This is the default location for toolchains installed by the ADT Installer or by hand. If ADT is installed in other location, use that location as Toolchain location.\nIf you are using a system-derived toolchain, the path you provide for the Toolchain Root Location field is the Yocto Project's build directory. \n \n E.g:/home/user/yocto/poky/build \n", | ||
52 | "expected_results": "" | ||
53 | }, | ||
54 | "4": { | ||
55 | "action": "(b)Specify the Sysroot Location: \nSysroot Location is the location where the root filesystem for the target hardware is created on the development system by the ADT Installer (SYSROOT in step 2 of the case ADT installer Installation). \n \n Local : e.g: /home/user/qemux86-sato-sdk \nUsing ADT : e.g :/home/user/test-yocto/qemux86 \n\n", | ||
56 | "expected_results": "" | ||
57 | }, | ||
58 | "5": { | ||
59 | "action": "(c)Select the Target Architecture: \n \nThe target architecture is the type of hardware you are going to use or emulate. Use the pull-down Target Architecture menu to make your selection. \n \n\n", | ||
60 | "expected_results": "" | ||
61 | }, | ||
62 | "6": { | ||
63 | "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n", | ||
64 | "expected_results": "" | ||
65 | }, | ||
66 | "7": { | ||
67 | "action": "(e) select OK to save the settings. \n\n\n1: In the Eclipse toolbar, expose the Run -> External Tools menu. Your image should appear as a selectable menu item. \n2: Select your image in the navigation pane to launch the emulator in a new window. \n3: If needed, enter your host root password in the shell window at the prompt. This sets up a Tap 0 connection needed for running in user-space NFS mode. \n", | ||
68 | "expected_results": "" | ||
69 | } | ||
70 | }, | ||
71 | "summary": "Launch_QEMU_from_Eclipse" | ||
72 | } | ||
73 | }, | ||
74 | { | ||
75 | "test": { | ||
76 | "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project", | ||
77 | "author": [ | ||
78 | { | ||
79 | "email": "ee.peng.yeoh@intel.com", | ||
80 | "name": "ee.peng.yeoh@intel.com" | ||
81 | } | ||
82 | ], | ||
83 | "execution": { | ||
84 | "1": { | ||
85 | "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ", | ||
86 | "expected_results": "" | ||
87 | }, | ||
88 | "2": { | ||
89 | "action": "Select File -> New -> Project.", | ||
90 | "expected_results": "" | ||
91 | }, | ||
92 | "3": { | ||
93 | "action": "Double click C/C++.", | ||
94 | "expected_results": "" | ||
95 | }, | ||
96 | "4": { | ||
97 | "action": "Click C or C++ Project to create the project.", | ||
98 | "expected_results": "" | ||
99 | }, | ||
100 | "5": { | ||
101 | "action": "Expand Yocto ADT Project.", | ||
102 | "expected_results": "" | ||
103 | }, | ||
104 | "6": { | ||
105 | "action": "Select Hello World ANSI C Autotools Project.", | ||
106 | "expected_results": "" | ||
107 | }, | ||
108 | "7": { | ||
109 | "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n", | ||
110 | "expected_results": "" | ||
111 | }, | ||
112 | "8": { | ||
113 | "action": "Click Next.", | ||
114 | "expected_results": "" | ||
115 | }, | ||
116 | "9": { | ||
117 | "action": "Add information in the Author and Copyright notice fields. \n1", | ||
118 | "expected_results": "" | ||
119 | }, | ||
120 | "10": { | ||
121 | "action": "Click Finish. \n1", | ||
122 | "expected_results": "" | ||
123 | }, | ||
124 | "11": { | ||
125 | "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1", | ||
126 | "expected_results": "" | ||
127 | }, | ||
128 | "12": { | ||
129 | "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1", | ||
130 | "expected_results": "" | ||
131 | }, | ||
132 | "13": { | ||
133 | "action": "In the Project Explorer window, right click the project -> Build project. \n1", | ||
134 | "expected_results": "Under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n" | ||
135 | }, | ||
136 | "14": { | ||
137 | "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1", | ||
138 | "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target." | ||
139 | }, | ||
140 | "15": { | ||
141 | "action": "After all settings are done, select the Run button on the bottom right corner \n\n1", | ||
142 | "expected_results": "" | ||
143 | }, | ||
144 | "16": { | ||
145 | "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \nRight click it again and Debug as -> Debug Configurations \nUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \nin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application.\ne.g.: /home/root/myapplication \nIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1", | ||
146 | "expected_results": "" | ||
147 | }, | ||
148 | "17": { | ||
149 | "action": "After all settings are done, select the Debug button on the bottom right corner", | ||
150 | "expected_results": "" | ||
151 | } | ||
152 | }, | ||
153 | "summary": "Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project" | ||
154 | } | ||
155 | }, | ||
156 | { | ||
157 | "test": { | ||
158 | "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project", | ||
159 | "author": [ | ||
160 | { | ||
161 | "email": "ee.peng.yeoh@intel.com", | ||
162 | "name": "ee.peng.yeoh@intel.com" | ||
163 | } | ||
164 | ], | ||
165 | "execution": { | ||
166 | "1": { | ||
167 | "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ", | ||
168 | "expected_results": "" | ||
169 | }, | ||
170 | "2": { | ||
171 | "action": "Select File -> New -> Project. ", | ||
172 | "expected_results": "" | ||
173 | }, | ||
174 | "3": { | ||
175 | "action": "Double click C/C++. ", | ||
176 | "expected_results": "" | ||
177 | }, | ||
178 | "4": { | ||
179 | "action": "Click C or C++ Project to create the project. ", | ||
180 | "expected_results": "" | ||
181 | }, | ||
182 | "5": { | ||
183 | "action": "Expand Yocto ADT Project. ", | ||
184 | "expected_results": "" | ||
185 | }, | ||
186 | "6": { | ||
187 | "action": "Select Hello World ANSI C++ Autotools Project. ", | ||
188 | "expected_results": "" | ||
189 | }, | ||
190 | "7": { | ||
191 | "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n", | ||
192 | "expected_results": "" | ||
193 | }, | ||
194 | "8": { | ||
195 | "action": "Click Next.", | ||
196 | "expected_results": "" | ||
197 | }, | ||
198 | "9": { | ||
199 | "action": "Add information in the Author and Copyright notice fields.", | ||
200 | "expected_results": "" | ||
201 | }, | ||
202 | "10": { | ||
203 | "action": "Click Finish. \n1", | ||
204 | "expected_results": "" | ||
205 | }, | ||
206 | "11": { | ||
207 | "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1", | ||
208 | "expected_results": "" | ||
209 | }, | ||
210 | "12": { | ||
211 | "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1", | ||
212 | "expected_results": "" | ||
213 | }, | ||
214 | "13": { | ||
215 | "action": "In the Project Explorer window, right click the project -> Build project. \n\n1", | ||
216 | "expected_results": "under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n" | ||
217 | }, | ||
218 | "14": { | ||
219 | "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1", | ||
220 | "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target." | ||
221 | }, | ||
222 | "15": { | ||
223 | "action": "After all settings are done, select the Run button on the bottom right corner \n\n1", | ||
224 | "expected_results": "" | ||
225 | }, | ||
226 | "16": { | ||
227 | "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \n\t\tRight click it again and Debug as -> Debug Configurations \n\t\tUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. \n\t\te.g.: /home/root/myapplication \n\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1", | ||
228 | "expected_results": "" | ||
229 | }, | ||
230 | "17": { | ||
231 | "action": "After all settings are done, select the Debug button on the bottom right corner", | ||
232 | "expected_results": "" | ||
233 | } | ||
234 | }, | ||
235 | "summary": "Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project" | ||
236 | } | ||
237 | }, | ||
238 | { | ||
239 | "test": { | ||
240 | "@alias": "eclipse-plugin.eclipse-plugin.Build_Eclipse_Plugin_from_source", | ||
241 | "author": [ | ||
242 | { | ||
243 | "email": "laurentiu.serban@intel.com", | ||
244 | "name": "laurentiu.serban@intel.com" | ||
245 | } | ||
246 | ], | ||
247 | "execution": { | ||
248 | "1": { | ||
249 | "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n", | ||
250 | "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n" | ||
251 | }, | ||
252 | "2": { | ||
253 | "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n", | ||
254 | "expected_results": "After plugin is build you must have 4 archive in foder scripts from eclipse-poky: \n - org.yocto.bc - mars-master-$date.zip \n - org.yocto.doc - mars-master-$date.zip --> documentation \n - org.yocto.sdk - mars-master-$date.zip \n - org.yocto.sdk - mars-master-$date.-archive.zip --> plugin " | ||
255 | }, | ||
256 | "3": { | ||
257 | "action": "Move to scripts/ folder. \n\n", | ||
258 | "expected_results": "" | ||
259 | }, | ||
260 | "4": { | ||
261 | "action": "Run ./setup.sh \n\n", | ||
262 | "expected_results": "" | ||
263 | }, | ||
264 | "5": { | ||
265 | "action": "When the script finishes, it prompts a command to issue to build the plugin. It should look similar to the following: \n\n$ ECLIPSE_HOME=/eclipse-poky/scripts/eclipse ./build.sh /&1 | tee -a build.log \n\nHere, the three arguments to the build script are tag name, branch for documentation and release name. \n\n", | ||
266 | "expected_results": "" | ||
267 | }, | ||
268 | "6": { | ||
269 | "action": "On an eclipse without the Yocto Plugin, select \"Install New Software\" from Help pull-down menu \n\n", | ||
270 | "expected_results": "" | ||
271 | }, | ||
272 | "7": { | ||
273 | "action": "Select Add and from the dialog choose Archive... Look for the *archive.zip file that was built previously with the build.sh script. Click OK. \n\n", | ||
274 | "expected_results": "" | ||
275 | }, | ||
276 | "8": { | ||
277 | "action": "Select all components and proceed with Installation of plugin. Restarting eclipse might be required.\n", | ||
278 | "expected_results": "" | ||
279 | } | ||
280 | }, | ||
281 | "summary": "Build_Eclipse_Plugin_from_source" | ||
282 | } | ||
283 | }, | ||
284 | { | ||
285 | "test": { | ||
286 | "@alias": "eclipse-plugin.eclipse-plugin.Eclipse_Poky_installation_and_setup", | ||
287 | "author": [ | ||
288 | { | ||
289 | "email": "ee.peng.yeoh@intel.com", | ||
290 | "name": "ee.peng.yeoh@intel.com" | ||
291 | } | ||
292 | ], | ||
293 | "execution": { | ||
294 | "1": { | ||
295 | "action": "Install SDK \n\ta)Download https://autobuilder.yocto.io/pub/releases//toolchain/x86_64/poky-glibc-x86_64-core-\timage-sato-i586-toolchain-.sh \n\tb)Run the SDK installer and accept the default installation directory ", | ||
296 | "expected_results": "" | ||
297 | }, | ||
298 | "2": { | ||
299 | "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) \n\ta) Go to https://www.eclipse.org/downloads/packages/all, click \"Oxygen R\" \n\tb) Click to download the build for your OS \n\tc) Click \"Download\" button to download from a mirror \n\td) Run \"tar xf\" to extract the downloaded archive ", | ||
300 | "expected_result": "" | ||
301 | }, | ||
302 | "3": { | ||
303 | "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) (Continue) \n\te) Run \"eclipse/eclipse\" to start Eclipse \n\tf) Optional step for host machine within Intel network: In Eclipse workbench window, go to \"Window\" menu -> \"Preferences...\". \n\tg) In \"Preferences\" dialog, go to \"General\" -> \"Network Connections\", set \"Active Provider\" to \"Manual\". In \"Proxy \tentries\" table, select HTTP and click \"Edit\" and enter host \"proxy-chain.intel.com\" port 911, click OK. Repeat for HTTPS with port 912 \nClick OK to close \"Preferences\" dialog. \n\th) Go to \"File\" menu -> \"Restart\" to restart Eclipse for proxy settings to take effect. ", | ||
304 | "expected_result": "" | ||
305 | }, | ||
306 | "4": { | ||
307 | "action": "Install Eclipse Poky plugins \n\ta) Download https://autobuilder.yocto.io/pub/releases/<yocto-version>/eclipse-plugin/<eclipse-version>/org.yocto.sdk-development-<date>-archive.zip \n\tb) In Eclipse workbench window, go to \"Help\" menu -> \"Install New Software...\" \n\tc) In \"Install\" dialog, click \"Add...\" button \n\td) In \"Add Repository\" dialog, enter \"Eclipse Poky\" for (repository) Name, click \"Archive...\" ", | ||
308 | "expected_results": "" | ||
309 | }, | ||
310 | "5": { | ||
311 | "action": "Install Eclipse Poky plugins (continue) \n\te) In \"Repository archive\" browse dialog, select the downloaded Eclipse Poky repository archive \n\tf) Back in \"Add Repository\" dialog, click \"OK\" \n\tg) Back in \"Install\" dialog, make sure \"Work with:\" is set to \"Eclipse Poky\" repository, tick \"Yocto Project \tDocumentation Plug-in\" and \"Yocto Project SDK Plug-in\", click \"Next >\" and verify plugins/features name/version, \tclick \"Next >\" and accept license agreement, click \"Finish\" \n\th) If \"Security Warning\" dialog appears, click \"OK\" to install unsigned content. \n\ti) In \"Software Updates\" dialog, click \"Yes\" to restart Eclipse to complete Eclipse Poky plugins installation. ", | ||
312 | "expected_results": "" | ||
313 | }, | ||
314 | "6": { | ||
315 | "action": "Setup Eclipse Poky to use SDK \n\ta) In Eclipse workbench window, go to \"Window\" menu -> \"Preferences\". \n\tb) In \"Preferences\" window, go to \"Yocto Project SDK\", in \"Cross Compiler Options\" frame, select \"Standalone pre-built toolchain\". ", | ||
316 | "expected_results": "Eclipse Poky plugins installed and running successfully, e.g. observe that \"Yocto Project Tools\" menu is available in the Eclipse workbench window." | ||
317 | } | ||
318 | }, | ||
319 | "summary": "Eclipse_Poky_installation_and_setup" | ||
320 | } | ||
321 | } | ||
322 | ] | ||
diff --git a/meta/lib/oeqa/runtime/case.py b/meta/lib/oeqa/runtime/case.py index f036982e1f..9515ca2f3d 100644 --- a/meta/lib/oeqa/runtime/case.py +++ b/meta/lib/oeqa/runtime/case.py | |||
@@ -4,6 +4,9 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import os | ||
8 | import subprocess | ||
9 | import time | ||
7 | from oeqa.core.case import OETestCase | 10 | from oeqa.core.case import OETestCase |
8 | from oeqa.utils.package_manager import install_package, uninstall_package | 11 | from oeqa.utils.package_manager import install_package, uninstall_package |
9 | 12 | ||
@@ -18,3 +21,16 @@ class OERuntimeTestCase(OETestCase): | |||
18 | def tearDown(self): | 21 | def tearDown(self): |
19 | super(OERuntimeTestCase, self).tearDown() | 22 | super(OERuntimeTestCase, self).tearDown() |
20 | uninstall_package(self) | 23 | uninstall_package(self) |
24 | |||
25 | def run_network_serialdebug(target): | ||
26 | status, output = target.runner.run_serial("ip addr") | ||
27 | print("ip addr on target: %s %s" % (output, status)) | ||
28 | status, output = target.runner.run_serial("ping -c 1 %s" % target.server_ip) | ||
29 | print("ping on target for %s: %s %s" % (target.server_ip, output, status)) | ||
30 | status, output = target.runner.run_serial("ping -c 1 %s" % target.ip) | ||
31 | print("ping on target for %s: %s %s" % (target.ip, output, status)) | ||
32 | # Have to use a full path for netstat which isn't in HOSTTOOLS | ||
33 | subprocess.call(["/usr/bin/netstat", "-tunape"]) | ||
34 | subprocess.call(["/usr/bin/netstat", "-ei"]) | ||
35 | subprocess.call(["ps", "-awx"], shell=True) | ||
36 | print("PID: %s %s" % (str(os.getpid()), time.time())) | ||
diff --git a/meta/lib/oeqa/runtime/cases/buildcpio.py b/meta/lib/oeqa/runtime/cases/buildcpio.py index 7be734cb4f..0c9c57a3cb 100644 --- a/meta/lib/oeqa/runtime/cases/buildcpio.py +++ b/meta/lib/oeqa/runtime/cases/buildcpio.py | |||
@@ -29,6 +29,6 @@ class BuildCpioTest(OERuntimeTestCase): | |||
29 | @OEHasPackage(['autoconf']) | 29 | @OEHasPackage(['autoconf']) |
30 | def test_cpio(self): | 30 | def test_cpio(self): |
31 | self.project.download_archive() | 31 | self.project.download_archive() |
32 | self.project.run_configure() | 32 | self.project.run_configure(configure_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'") |
33 | self.project.run_make() | 33 | self.project.run_make(make_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'") |
34 | self.project.run_install() | 34 | self.project.run_install() |
diff --git a/meta/lib/oeqa/runtime/cases/buildlzip.py b/meta/lib/oeqa/runtime/cases/buildlzip.py index 44f4f1be71..921a0bca61 100644 --- a/meta/lib/oeqa/runtime/cases/buildlzip.py +++ b/meta/lib/oeqa/runtime/cases/buildlzip.py | |||
@@ -15,7 +15,7 @@ class BuildLzipTest(OERuntimeTestCase): | |||
15 | @classmethod | 15 | @classmethod |
16 | def setUpClass(cls): | 16 | def setUpClass(cls): |
17 | uri = 'http://downloads.yoctoproject.org/mirror/sources' | 17 | uri = 'http://downloads.yoctoproject.org/mirror/sources' |
18 | uri = '%s/lzip-1.19.tar.gz' % uri | 18 | uri = '%s/lzip-1.25.tar.gz' % uri |
19 | cls.project = TargetBuildProject(cls.tc.target, | 19 | cls.project = TargetBuildProject(cls.tc.target, |
20 | uri, | 20 | uri, |
21 | dl_dir = cls.tc.td['DL_DIR']) | 21 | dl_dir = cls.tc.td['DL_DIR']) |
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py index eac8f2d082..c3be60f006 100644 --- a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py +++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py | |||
@@ -9,26 +9,7 @@ from oeqa.core.decorator.data import skipIfQemu | |||
9 | 9 | ||
10 | class Ethernet_Test(OERuntimeTestCase): | 10 | class Ethernet_Test(OERuntimeTestCase): |
11 | 11 | ||
12 | def set_ip(self, x): | ||
13 | x = x.split(".") | ||
14 | sample_host_address = '150' | ||
15 | x[3] = sample_host_address | ||
16 | x = '.'.join(x) | ||
17 | return x | ||
18 | |||
19 | @skipIfQemu() | 12 | @skipIfQemu() |
20 | @OETestDepends(['ssh.SSHTest.test_ssh']) | ||
21 | def test_set_virtual_ip(self): | ||
22 | (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'") | ||
23 | self.assertEqual(status, 0, msg='Failed to get ip address. Make sure you have an ethernet connection on your device, output: %s' % output) | ||
24 | original_ip = output | ||
25 | virtual_ip = self.set_ip(original_ip) | ||
26 | |||
27 | (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip)) | ||
28 | self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output) | ||
29 | |||
30 | @skipIfQemu() | ||
31 | @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip']) | ||
32 | def test_get_ip_from_dhcp(self): | 13 | def test_get_ip_from_dhcp(self): |
33 | (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'") | 14 | (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'") |
34 | self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output) | 15 | self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output) |
@@ -39,4 +20,4 @@ class Ethernet_Test(OERuntimeTestCase): | |||
39 | default_gateway = output | 20 | default_gateway = output |
40 | 21 | ||
41 | (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway)) | 22 | (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway)) |
42 | self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output) \ No newline at end of file | 23 | self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output) |
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py index f588a93200..e81360670c 100644 --- a/meta/lib/oeqa/runtime/cases/ltp.py +++ b/meta/lib/oeqa/runtime/cases/ltp.py | |||
@@ -57,7 +57,7 @@ class LtpTestBase(OERuntimeTestCase): | |||
57 | 57 | ||
58 | class LtpTest(LtpTestBase): | 58 | class LtpTest(LtpTestBase): |
59 | 59 | ||
60 | ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"] | 60 | ltp_groups = ["math", "syscalls", "dio", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"] |
61 | 61 | ||
62 | ltp_fs = ["fs", "fs_bind"] | 62 | ltp_fs = ["fs", "fs_bind"] |
63 | # skip kernel cpuhotplug | 63 | # skip kernel cpuhotplug |
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt index 2c0bd9a247..156b0f9c10 100644 --- a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt +++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt | |||
@@ -1,2 +1,19 @@ | |||
1 | # These should be reviewed to see if they are still needed | 1 | # These should be reviewed to see if they are still needed |
2 | cacheinfo: Failed to find cpu0 device node | 2 | cacheinfo: Failed to find cpu0 device node |
3 | |||
4 | # 6.10 restructures sysctl registration such that mips | ||
5 | # registers an empty table and generates harmless warnings: | ||
6 | # failed when register_sysctl_sz sched_fair_sysctls to kernel | ||
7 | # failed when register_sysctl_sz sched_core_sysctls to kernel | ||
8 | failed when register_sysctl_sz sched | ||
9 | |||
10 | # With qemu 9.1.0 | ||
11 | # pci 0000:00:00.0: BAR 2: can't handle BAR above 4GB (bus address 0x1f00000010) | ||
12 | # pci 0000:00:00.0: BAR 5: error updating (0x1105d034 != 0x0100d034) | ||
13 | BAR 0: error updating | ||
14 | BAR 1: error updating | ||
15 | BAR 2: error updating | ||
16 | BAR 3: error updating | ||
17 | BAR 4: error updating | ||
18 | BAR 5: error updating | ||
19 | : can't handle BAR above 4GB | ||
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt index b0c0fc9ddf..143db40d63 100644 --- a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt +++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt | |||
@@ -13,6 +13,14 @@ FBIOPUT_VSCREENINFO failed, double buffering disabled | |||
13 | # pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size) | 13 | # pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size) |
14 | # pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size) | 14 | # pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size) |
15 | invalid BAR (can't size) | 15 | invalid BAR (can't size) |
16 | # 6.10+ the invalid BAR warnings are of this format: | ||
17 | # pci 0000:00:00.0: [Firmware Bug]: BAR 0: invalid; can't size | ||
18 | # pci 0000:00:00.0: [Firmware Bug]: BAR 1: invalid; can't size | ||
19 | # pci 0000:00:00.0: [Firmware Bug]: BAR 2: invalid; can't size | ||
20 | # pci 0000:00:00.0: [Firmware Bug]: BAR 3: invalid; can't size | ||
21 | # pci 0000:00:00.0: [Firmware Bug]: BAR 4: invalid; can't size | ||
22 | # pci 0000:00:00.0: [Firmware Bug]: BAR 5: invalid; can't size | ||
23 | invalid; can't size | ||
16 | 24 | ||
17 | # These should be reviewed to see if they are still needed | 25 | # These should be reviewed to see if they are still needed |
18 | wrong ELF class | 26 | wrong ELF class |
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py index 6966923c94..47c77fccd5 100644 --- a/meta/lib/oeqa/runtime/cases/parselogs.py +++ b/meta/lib/oeqa/runtime/cases/parselogs.py | |||
@@ -34,7 +34,7 @@ class ParseLogsTest(OERuntimeTestCase): | |||
34 | log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"] | 34 | log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"] |
35 | 35 | ||
36 | # The keywords that identify error messages in the log files | 36 | # The keywords that identify error messages in the log files |
37 | errors = ["error", "cannot", "can't", "failed"] | 37 | errors = ["error", "cannot", "can't", "failed", "---[ cut here ]---", "No irq handler for vector"] |
38 | 38 | ||
39 | # A list of error messages that should be ignored | 39 | # A list of error messages that should be ignored |
40 | ignore_errors = [] | 40 | ignore_errors = [] |
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py index f72460e7f3..efb91d4cc9 100644 --- a/meta/lib/oeqa/runtime/cases/ping.py +++ b/meta/lib/oeqa/runtime/cases/ping.py | |||
@@ -7,7 +7,7 @@ | |||
7 | from subprocess import Popen, PIPE | 7 | from subprocess import Popen, PIPE |
8 | from time import sleep | 8 | from time import sleep |
9 | 9 | ||
10 | from oeqa.runtime.case import OERuntimeTestCase | 10 | from oeqa.runtime.case import OERuntimeTestCase, run_network_serialdebug |
11 | from oeqa.core.decorator.oetimeout import OETimeout | 11 | from oeqa.core.decorator.oetimeout import OETimeout |
12 | from oeqa.core.exception import OEQATimeoutError | 12 | from oeqa.core.exception import OEQATimeoutError |
13 | 13 | ||
@@ -18,6 +18,13 @@ class PingTest(OERuntimeTestCase): | |||
18 | output = '' | 18 | output = '' |
19 | count = 0 | 19 | count = 0 |
20 | self.assertNotEqual(len(self.target.ip), 0, msg="No target IP address set") | 20 | self.assertNotEqual(len(self.target.ip), 0, msg="No target IP address set") |
21 | |||
22 | # If the target IP is localhost (because user-space networking is being used), | ||
23 | # then there's no point in pinging it. | ||
24 | if self.target.ip.startswith("127.0.0.") or self.target.ip in ("localhost", "::1"): | ||
25 | print("runtime/ping: localhost detected, not pinging") | ||
26 | return | ||
27 | |||
21 | try: | 28 | try: |
22 | while count < 5: | 29 | while count < 5: |
23 | cmd = 'ping -c 1 %s' % self.target.ip | 30 | cmd = 'ping -c 1 %s' % self.target.ip |
@@ -29,6 +36,7 @@ class PingTest(OERuntimeTestCase): | |||
29 | count = 0 | 36 | count = 0 |
30 | sleep(1) | 37 | sleep(1) |
31 | except OEQATimeoutError: | 38 | except OEQATimeoutError: |
39 | run_network_serialdebug(self.target) | ||
32 | self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output)) | 40 | self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output)) |
33 | msg = ('Expected 5 consecutive, got %d.\n' | 41 | msg = ('Expected 5 consecutive, got %d.\n' |
34 | 'ping output is:\n%s' % (count,output)) | 42 | 'ping output is:\n%s' % (count,output)) |
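The retry logic in test_ping requires five consecutive successful pings, resetting the count on any failure. As a standalone sketch of the same pattern (illustrative names, not part of the patch):

    import subprocess
    import time

    def ping_until_stable(ip, needed=5, deadline=60):
        # Require `needed` consecutive successful pings; any failure
        # resets the count, and we give up after `deadline` seconds.
        count = 0
        end = time.time() + deadline
        while count < needed:
            if time.time() > end:
                return False
            ok = subprocess.run(["ping", "-c", "1", ip],
                                capture_output=True).returncode == 0
            count = count + 1 if ok else 0
            time.sleep(1)
        return True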
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py index ee97b8ef66..364264369a 100644 --- a/meta/lib/oeqa/runtime/cases/scp.py +++ b/meta/lib/oeqa/runtime/cases/scp.py | |||
@@ -25,7 +25,7 @@ class ScpTest(OERuntimeTestCase): | |||
25 | os.remove(cls.tmp_path) | 25 | os.remove(cls.tmp_path) |
26 | 26 | ||
27 | @OETestDepends(['ssh.SSHTest.test_ssh']) | 27 | @OETestDepends(['ssh.SSHTest.test_ssh']) |
28 | @OEHasPackage(['openssh-scp']) | 28 | @OEHasPackage({'openssh-scp', 'openssh-sftp-server'}) |
29 | def test_scp_file(self): | 29 | def test_scp_file(self): |
30 | dst = '/tmp/test_scp_file' | 30 | dst = '/tmp/test_scp_file' |
31 | 31 | ||
diff --git a/meta/lib/oeqa/runtime/cases/skeletoninit.py b/meta/lib/oeqa/runtime/cases/skeletoninit.py index d0fdcbded9..be7b39a9a3 100644 --- a/meta/lib/oeqa/runtime/cases/skeletoninit.py +++ b/meta/lib/oeqa/runtime/cases/skeletoninit.py | |||
@@ -4,8 +4,7 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | # This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 | 7 | # Image under test must have meta-skeleton layer in bblayers and |
8 | # testcase. Image under test must have meta-skeleton layer in bblayers and | ||
9 | # IMAGE_INSTALL:append = " service" in local.conf | 8 | # IMAGE_INSTALL:append = " service" in local.conf |
10 | from oeqa.runtime.case import OERuntimeTestCase | 9 | from oeqa.runtime.case import OERuntimeTestCase |
11 | from oeqa.core.decorator.depends import OETestDepends | 10 | from oeqa.core.decorator.depends import OETestDepends |
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py index cdbef59500..b632a29a01 100644 --- a/meta/lib/oeqa/runtime/cases/ssh.py +++ b/meta/lib/oeqa/runtime/cases/ssh.py | |||
@@ -4,7 +4,10 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | from oeqa.runtime.case import OERuntimeTestCase | 7 | import time |
8 | import signal | ||
9 | |||
10 | from oeqa.runtime.case import OERuntimeTestCase, run_network_serialdebug | ||
8 | from oeqa.core.decorator.depends import OETestDepends | 11 | from oeqa.core.decorator.depends import OETestDepends |
9 | from oeqa.runtime.decorator.package import OEHasPackage | 12 | from oeqa.runtime.decorator.package import OEHasPackage |
10 | 13 | ||
@@ -13,12 +16,23 @@ class SSHTest(OERuntimeTestCase): | |||
13 | @OETestDepends(['ping.PingTest.test_ping']) | 16 | @OETestDepends(['ping.PingTest.test_ping']) |
14 | @OEHasPackage(['dropbear', 'openssh-sshd']) | 17 | @OEHasPackage(['dropbear', 'openssh-sshd']) |
15 | def test_ssh(self): | 18 | def test_ssh(self): |
16 | (status, output) = self.target.run('sleep 20', timeout=2) | 19 | for i in range(5): |
17 | msg='run() timed out but return code was zero.' | 20 | status, output = self.target.run("uname -a", timeout=30) |
18 | self.assertNotEqual(status, 0, msg=msg) | 21 | if status == 0: |
19 | (status, output) = self.target.run('uname -a') | 22 | break |
20 | self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) | 23 | elif status == 255 or status == -signal.SIGTERM: |
21 | (status, output) = self.target.run('cat /etc/controllerimage') | 24 | # ssh returns 255 only if an ssh error occurs. This could |
22 | msg = "This isn't the right image - /etc/controllerimage " \ | 25 | # be an issue with "Connection refused" because the port |
23 | "shouldn't be here %s" % output | 26 | # isn't open yet, and this could check explicitly for that |
24 | self.assertEqual(status, 1, msg=msg) | 27 | # here. However, let's keep it simple and just retry for |
28 | # all errors a limited number of times with a sleep to | ||
29 | # give it time for the port to open. | ||
30 | # We sometimes see -15 (SIGTERM) on slow emulation machines too, likely | ||
31 | # from boot/init not being 100% complete, retry for these too. | ||
32 | time.sleep(5) | ||
33 | continue | ||
34 | else: | ||
35 | run_network_serialdebug(self.target) | ||
36 | self.fail("uname failed with \"%s\" (exit code %s)" % (output, status)) | ||
37 | if status != 0: | ||
38 | self.fail("ssh failed with \"%s\" (exit code %s)" % (output, status)) | ||
diff --git a/meta/lib/oeqa/runtime/cases/stap.py b/meta/lib/oeqa/runtime/cases/stap.py index 3be4162108..6b55e7de50 100644 --- a/meta/lib/oeqa/runtime/cases/stap.py +++ b/meta/lib/oeqa/runtime/cases/stap.py | |||
@@ -21,11 +21,12 @@ class StapTest(OERuntimeTestCase): | |||
21 | status, output = self.target.run(cmd, 900) | 21 | status, output = self.target.run(cmd, 900) |
22 | self.assertEqual(status, 0, msg='\n'.join([cmd, output])) | 22 | self.assertEqual(status, 0, msg='\n'.join([cmd, output])) |
23 | 23 | ||
24 | cmd = 'stap -v -p4 -m stap-hello --disable-cache -DSTP_NO_VERREL_CHECK -e \'probe oneshot { print("Hello, "); println("SystemTap!") }\'' | 24 | cmd = 'stap -v -p4 -m stap_hello --disable-cache -DSTP_NO_VERREL_CHECK -e \'probe oneshot { print("Hello, "); println("SystemTap!") }\'' |
25 | status, output = self.target.run(cmd, 900) | 25 | status, output = self.target.run(cmd, 900) |
26 | self.assertEqual(status, 0, msg='\n'.join([cmd, output])) | 26 | self.assertEqual(status, 0, msg='\n'.join([cmd, output])) |
27 | 27 | ||
28 | cmd = 'staprun -v -R -b1 stap-hello.ko' | 28 | cmd = 'staprun -v -R -b1 stap_hello.ko' |
29 | status, output = self.target.run(cmd, 60) | ||
29 | self.assertEqual(status, 0, msg='\n'.join([cmd, output])) | 30 | self.assertEqual(status, 0, msg='\n'.join([cmd, output])) |
30 | self.assertIn('Hello, SystemTap!', output, msg='\n'.join([cmd, output])) | 31 | self.assertIn('Hello, SystemTap!', output, msg='\n'.join([cmd, output])) |
31 | except: | 32 | except: |
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py index 5481e1d840..640f28abe9 100644 --- a/meta/lib/oeqa/runtime/cases/systemd.py +++ b/meta/lib/oeqa/runtime/cases/systemd.py | |||
@@ -145,18 +145,29 @@ class SystemdServiceTests(SystemdTest): | |||
145 | Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks, | 145 | Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks, |
146 | extracted from the minidebuginfo metadata (.gnu_debugdata elf section). | 146 | extracted from the minidebuginfo metadata (.gnu_debugdata elf section). |
147 | """ | 147 | """ |
148 | t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && sleep 1000",)) | 148 | # use "env sleep" instead of "sleep" to avoid calling the shell builtin function |
149 | t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && env sleep 1000",)) | ||
149 | t_thread.start() | 150 | t_thread.start() |
150 | time.sleep(1) | 151 | time.sleep(1) |
151 | 152 | ||
152 | status, output = self.target.run('pidof sleep') | 153 | status, sleep_pid = self.target.run('pidof sleep') |
153 | # cause segfault on purpose | 154 | # cause segfault on purpose |
154 | self.target.run('kill -SEGV %s' % output) | 155 | self.target.run('kill -SEGV %s' % sleep_pid) |
155 | self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % output) | 156 | self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % sleep_pid) |
156 | 157 | ||
157 | (status, output) = self.target.run('coredumpctl info') | 158 | # Give some time to systemd-coredump@.service to process the coredump |
159 | for x in range(20): | ||
160 | status, output = self.target.run('coredumpctl list %s' % sleep_pid) | ||
161 | if status == 0: | ||
162 | break | ||
163 | time.sleep(1) | ||
164 | else: | ||
165 | self.fail("Timed out waiting for coredump creation") | ||
166 | |||
167 | (status, output) = self.target.run('coredumpctl info %s' % sleep_pid) | ||
158 | self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output) | 168 | self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output) |
159 | self.assertEqual('sleep_for_duration (busybox.nosuid' in output, True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output) | 169 | self.assertEqual('sleep_for_duration (busybox.nosuid' in output or 'xnanosleep (sleep.coreutils' in output, |
170 | True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output) | ||
160 | 171 | ||
161 | class SystemdJournalTests(SystemdTest): | 172 | class SystemdJournalTests(SystemdTest): |
162 | 173 | ||
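The for/else loop added above is a poll-with-timeout: systemd-coredump@.service processes the dump asynchronously, so the test must wait rather than assert immediately. The general pattern, as a sketch:

    import time

    def wait_for(check, tries=20, delay=1):
        # check() returns (status, output); status == 0 means success.
        # Fail distinctly on timeout instead of asserting on a possibly
        # transient intermediate error.
        for _ in range(tries):
            status, output = check()
            if status == 0:
                return output
            time.sleep(delay)
        raise TimeoutError("condition not met after %d attempts" % tries)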
diff --git a/meta/lib/oeqa/runtime/cases/uki.py b/meta/lib/oeqa/runtime/cases/uki.py new file mode 100644 index 0000000000..77bc5b9791 --- /dev/null +++ b/meta/lib/oeqa/runtime/cases/uki.py | |||
@@ -0,0 +1,16 @@ | |||
1 | # SPDX-License-Identifier: MIT | ||
2 | # | ||
3 | |||
4 | from oeqa.runtime.case import OERuntimeTestCase | ||
5 | from oeqa.core.decorator.data import skipIfNotInDataVar | ||
6 | |||
7 | class UkiTest(OERuntimeTestCase): | ||
8 | |||
9 | @skipIfNotInDataVar('IMAGE_CLASSES', 'uki', 'Test case uki is for images which use uki.bbclass') | ||
10 | def test_uki(self): | ||
11 | uki_filename = self.td.get('UKI_FILENAME') | ||
12 | status, output = self.target.run('ls /boot/EFI/Linux/%s' % uki_filename) | ||
13 | self.assertEqual(status, 0, output) | ||
14 | |||
15 | status, output = self.target.run('echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep %s' % uki_filename) | ||
16 | self.assertEqual(status, 0, output) | ||
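The second check reads the LoaderEntrySelected EFI variable through a shell pipeline. For reference, the same read done directly in Python (a sketch assuming efivarfs semantics: the first four bytes of the file are the attribute flags, and systemd-boot stores the value as UTF-16LE):

    def loader_entry_selected():
        path = ("/sys/firmware/efi/efivars/"
                "LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f")
        with open(path, "rb") as f:
            data = f.read()
        # Skip the 4-byte attribute header, decode the UTF-16LE payload.
        return data[4:].decode("utf-16-le").rstrip("\x00")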
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py index cb7227a8df..daabc44910 100644 --- a/meta/lib/oeqa/runtime/context.py +++ b/meta/lib/oeqa/runtime/context.py | |||
@@ -8,6 +8,7 @@ import os | |||
8 | import sys | 8 | import sys |
9 | 9 | ||
10 | from oeqa.core.context import OETestContext, OETestContextExecutor | 10 | from oeqa.core.context import OETestContext, OETestContextExecutor |
11 | from oeqa.core.target.serial import OESerialTarget | ||
11 | from oeqa.core.target.ssh import OESSHTarget | 12 | from oeqa.core.target.ssh import OESSHTarget |
12 | from oeqa.core.target.qemu import OEQemuTarget | 13 | from oeqa.core.target.qemu import OEQemuTarget |
13 | 14 | ||
@@ -60,7 +61,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
60 | runtime_group = self.parser.add_argument_group('runtime options') | 61 | runtime_group = self.parser.add_argument_group('runtime options') |
61 | 62 | ||
62 | runtime_group.add_argument('--target-type', action='store', | 63 | runtime_group.add_argument('--target-type', action='store', |
63 | default=self.default_target_type, choices=['simpleremote', 'qemu'], | 64 | default=self.default_target_type, choices=['simpleremote', 'qemu', 'serial'], |
64 | help="Target type of device under test, default: %s" \ | 65 | help="Target type of device under test, default: %s" \ |
65 | % self.default_target_type) | 66 | % self.default_target_type) |
66 | runtime_group.add_argument('--target-ip', action='store', | 67 | runtime_group.add_argument('--target-ip', action='store', |
@@ -108,6 +109,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
108 | target = OESSHTarget(logger, target_ip, server_ip, **kwargs) | 109 | target = OESSHTarget(logger, target_ip, server_ip, **kwargs) |
109 | elif target_type == 'qemu': | 110 | elif target_type == 'qemu': |
110 | target = OEQemuTarget(logger, server_ip, **kwargs) | 111 | target = OEQemuTarget(logger, server_ip, **kwargs) |
112 | elif target_type == 'serial': | ||
113 | target = OESerialTarget(logger, target_ip, server_ip, **kwargs) | ||
111 | else: | 114 | else: |
112 | # XXX: This code uses the old naming convention for controllers and | 115 | # XXX: This code uses the old naming convention for controllers and |
113 | # targets, the idea it is to leave just targets as the controller | 116 | # targets, the idea it is to leave just targets as the controller |
@@ -203,8 +206,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
203 | 206 | ||
204 | super(OERuntimeTestContextExecutor, self)._process_args(logger, args) | 207 | super(OERuntimeTestContextExecutor, self)._process_args(logger, args) |
205 | 208 | ||
209 | td = self.tc_kwargs['init']['td'] | ||
210 | |||
206 | target_kwargs = {} | 211 | target_kwargs = {} |
212 | target_kwargs['machine'] = td.get("MACHINE") or None | ||
207 | target_kwargs['qemuboot'] = args.qemu_boot | 213 | target_kwargs['qemuboot'] = args.qemu_boot |
214 | target_kwargs['serialcontrol_cmd'] = td.get("TEST_SERIALCONTROL_CMD") or None | ||
215 | target_kwargs['serialcontrol_extra_args'] = td.get("TEST_SERIALCONTROL_EXTRA_ARGS") or "" | ||
216 | target_kwargs['serialcontrol_ps1'] = td.get("TEST_SERIALCONTROL_PS1") or None | ||
217 | target_kwargs['serialcontrol_connect_timeout'] = td.get("TEST_SERIALCONTROL_CONNECT_TIMEOUT") or None | ||
208 | 218 | ||
209 | self.tc_kwargs['init']['target'] = \ | 219 | self.tc_kwargs['init']['target'] = \ |
210 | OERuntimeTestContextExecutor.getTarget(args.target_type, | 220 | OERuntimeTestContextExecutor.getTarget(args.target_type, |
diff --git a/meta/lib/oeqa/sdk/case.py b/meta/lib/oeqa/sdk/case.py index c45882689c..1fd3b3b569 100644 --- a/meta/lib/oeqa/sdk/case.py +++ b/meta/lib/oeqa/sdk/case.py | |||
@@ -6,8 +6,11 @@ | |||
6 | 6 | ||
7 | import os | 7 | import os |
8 | import subprocess | 8 | import subprocess |
9 | import shutil | ||
10 | import unittest | ||
9 | 11 | ||
10 | from oeqa.core.case import OETestCase | 12 | from oeqa.core.case import OETestCase |
13 | from oeqa.sdkext.context import OESDKExtTestContext | ||
11 | 14 | ||
12 | class OESDKTestCase(OETestCase): | 15 | class OESDKTestCase(OETestCase): |
13 | def _run(self, cmd): | 16 | def _run(self, cmd): |
@@ -15,18 +18,76 @@ class OESDKTestCase(OETestCase): | |||
15 | (self.tc.sdk_env, cmd), shell=True, executable="/bin/bash", | 18 | (self.tc.sdk_env, cmd), shell=True, executable="/bin/bash", |
16 | stderr=subprocess.STDOUT, universal_newlines=True) | 19 | stderr=subprocess.STDOUT, universal_newlines=True) |
17 | 20 | ||
21 | def ensure_host_package(self, *packages, recipe=None): | ||
22 | """ | ||
23 | Check that the host variation of one of the packages listed is available | ||
24 | in the SDK (nativesdk-foo for SDK, foo-native for eSDK). The package is | ||
25 | a list for the case where debian-renaming may have occurred, and the | ||
26 | manifest could contain 'foo' or 'libfoo'. | ||
27 | |||
28 | If testing an eSDK and the package is not found, then try to install the | ||
29 | specified recipe from sstate. | ||
30 | """ | ||
31 | |||
32 | # In an SDK the manifest is correct. In an eSDK the manifest may be | ||
33 | # correct (type=full) or not include packages that exist in sstate but | ||
34 | # not installed yet (minimal) so we should try to install the recipe. | ||
35 | for package in packages: | ||
36 | if isinstance(self.tc, OESDKExtTestContext): | ||
37 | package = package + "-native" | ||
38 | else: | ||
39 | package = "nativesdk-" + package | ||
40 | |||
41 | if self.tc.hasHostPackage(package): | ||
42 | break | ||
43 | else: | ||
44 | if isinstance(self.tc, OESDKExtTestContext): | ||
45 | recipe = (recipe or packages[0]) + "-native" | ||
46 | print("Trying to install %s..." % recipe) | ||
47 | self._run('devtool sdk-install %s' % recipe) | ||
48 | else: | ||
49 | raise unittest.SkipTest("Test %s needs one of %s" % (self.id(), ", ".join(packages))) | ||
50 | |||
51 | def ensure_target_package(self, *packages, multilib=False, recipe=None): | ||
52 | """ | ||
53 | Check that at least one of the packages listed is available in the SDK, | ||
54 | adding the multilib prefix if required. The target package is a list for | ||
55 | the case where debian-renaming may have occurred, and the manifest could | ||
56 | contain 'foo' or 'libfoo'. | ||
57 | |||
58 | If testing an eSDK and the package is not found, then try to install the | ||
59 | specified recipe from sstate. | ||
60 | """ | ||
61 | |||
62 | # In an SDK the manifest is correct. In an eSDK the manifest may be | ||
63 | # correct (type=full) or not include packages that exist in sstate but | ||
64 | # not installed yet (minimal) so we should try to install the recipe. | ||
65 | for package in packages: | ||
66 | if self.tc.hasTargetPackage(package, multilib=multilib): | ||
67 | break | ||
68 | else: | ||
69 | if isinstance(self.tc, OESDKExtTestContext): | ||
70 | recipe = recipe or packages[0] | ||
71 | print("Trying to install %s..." % recipe) | ||
72 | self._run('devtool sdk-install %s' % recipe) | ||
73 | else: | ||
74 | raise unittest.SkipTest("Test %s needs one of %s" % (self.id(), ", ".join(packages))) | ||
75 | |||
76 | |||
18 | def fetch(self, workdir, dl_dir, url, archive=None): | 77 | def fetch(self, workdir, dl_dir, url, archive=None): |
19 | if not archive: | 78 | if not archive: |
20 | from urllib.parse import urlparse | 79 | from urllib.parse import urlparse |
21 | archive = os.path.basename(urlparse(url).path) | 80 | archive = os.path.basename(urlparse(url).path) |
22 | 81 | ||
23 | if dl_dir: | 82 | if dl_dir: |
24 | tarball = os.path.join(dl_dir, archive) | 83 | archive_tarball = os.path.join(dl_dir, archive) |
25 | if os.path.exists(tarball): | 84 | if os.path.exists(archive_tarball): |
26 | return tarball | 85 | return archive_tarball |
27 | 86 | ||
28 | tarball = os.path.join(workdir, archive) | 87 | tarball = os.path.join(workdir, archive) |
29 | subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT) | 88 | subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT) |
89 | if dl_dir and not os.path.exists(archive_tarball): | ||
90 | shutil.copyfile(tarball, archive_tarball) | ||
30 | return tarball | 91 | return tarball |
31 | 92 | ||
32 | def check_elf(self, path, target_os=None, target_arch=None): | 93 | def check_elf(self, path, target_os=None, target_arch=None): |
diff --git a/meta/lib/oeqa/sdk/cases/autotools.py b/meta/lib/oeqa/sdk/cases/autotools.py index 848e9392ec..ee6c522551 100644 --- a/meta/lib/oeqa/sdk/cases/autotools.py +++ b/meta/lib/oeqa/sdk/cases/autotools.py | |||
@@ -7,6 +7,7 @@ | |||
7 | import os | 7 | import os |
8 | import tempfile | 8 | import tempfile |
9 | import subprocess | 9 | import subprocess |
10 | import unittest | ||
10 | 11 | ||
11 | from oeqa.sdk.case import OESDKTestCase | 12 | from oeqa.sdk.case import OESDKTestCase |
12 | from oeqa.utils.subprocesstweak import errors_have_output | 13 | from oeqa.utils.subprocesstweak import errors_have_output |
@@ -16,6 +17,11 @@ class AutotoolsTest(OESDKTestCase): | |||
16 | """ | 17 | """ |
17 | Check that autotools will cross-compile correctly. | 18 | Check that autotools will cross-compile correctly. |
18 | """ | 19 | """ |
20 | def setUp(self): | ||
21 | libc = self.td.get("TCLIBC") | ||
22 | if libc in [ 'newlib' ]: | ||
23 | raise unittest.SkipTest("AutotoolsTest class: SDK doesn't contain a supported C library") | ||
24 | |||
19 | def test_cpio(self): | 25 | def test_cpio(self): |
20 | with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: | 26 | with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: |
21 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz") | 27 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz") |
@@ -29,8 +35,14 @@ class AutotoolsTest(OESDKTestCase): | |||
29 | self.assertTrue(os.path.isdir(dirs["source"])) | 35 | self.assertTrue(os.path.isdir(dirs["source"])) |
30 | os.makedirs(dirs["build"]) | 36 | os.makedirs(dirs["build"]) |
31 | 37 | ||
32 | self._run("cd {build} && {source}/configure $CONFIGURE_FLAGS".format(**dirs)) | 38 | self._run("cd {build} && {source}/configure CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration' $CONFIGURE_FLAGS".format(**dirs)) |
33 | self._run("cd {build} && make -j".format(**dirs)) | 39 | |
40 | # Check that configure detected the target correctly | ||
41 | with open(os.path.join(dirs["build"], "config.log")) as f: | ||
42 | host_sys = self.td["HOST_SYS"] | ||
43 | self.assertIn(f"host_alias='{host_sys}'\n", f.readlines()) | ||
44 | |||
45 | self._run("cd {build} && make CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration' -j".format(**dirs)) | ||
34 | self._run("cd {build} && make install DESTDIR={install}".format(**dirs)) | 46 | self._run("cd {build} && make install DESTDIR={install}".format(**dirs)) |
35 | 47 | ||
36 | self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "cpio")) | 48 | self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "cpio")) |
diff --git a/meta/lib/oeqa/sdk/cases/cmake.py b/meta/lib/oeqa/sdk/cases/cmake.py index db7d826a38..070682ef08 100644 --- a/meta/lib/oeqa/sdk/cases/cmake.py +++ b/meta/lib/oeqa/sdk/cases/cmake.py | |||
@@ -19,9 +19,11 @@ class CMakeTest(OESDKTestCase): | |||
19 | """ | 19 | """ |
20 | 20 | ||
21 | def setUp(self): | 21 | def setUp(self): |
22 | if not (self.tc.hasHostPackage("nativesdk-cmake") or | 22 | libc = self.td.get("TCLIBC") |
23 | self.tc.hasHostPackage("cmake-native")): | 23 | if libc in [ 'newlib' ]: |
24 | raise unittest.SkipTest("CMakeTest: needs cmake") | 24 | raise unittest.SkipTest("CMakeTest class: SDK doesn't contain a supported C library") |
25 | |||
26 | self.ensure_host_package("cmake") | ||
25 | 27 | ||
26 | def test_assimp(self): | 28 | def test_assimp(self): |
27 | with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: | 29 | with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: |
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py index fc28b9c3d4..e810d2c42b 100644 --- a/meta/lib/oeqa/sdk/cases/gcc.py +++ b/meta/lib/oeqa/sdk/cases/gcc.py | |||
@@ -26,6 +26,10 @@ class GccCompileTest(OESDKTestCase): | |||
26 | os.path.join(self.tc.sdk_dir, f)) | 26 | os.path.join(self.tc.sdk_dir, f)) |
27 | 27 | ||
28 | def setUp(self): | 28 | def setUp(self): |
29 | libc = self.td.get("TCLIBC") | ||
30 | if libc in [ 'newlib' ]: | ||
31 | raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a supported C library") | ||
32 | |||
29 | machine = self.td.get("MACHINE") | 33 | machine = self.td.get("MACHINE") |
30 | if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or | 34 | if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or |
31 | self.tc.hasHostPackage("^gcc-", regex=True)): | 35 | self.tc.hasHostPackage("^gcc-", regex=True)): |
diff --git a/meta/lib/oeqa/sdk/cases/gtk3.py b/meta/lib/oeqa/sdk/cases/gtk3.py index c329c4bb86..cdaf50ed38 100644 --- a/meta/lib/oeqa/sdk/cases/gtk3.py +++ b/meta/lib/oeqa/sdk/cases/gtk3.py | |||
@@ -7,40 +7,34 @@ | |||
7 | import os | 7 | import os |
8 | import subprocess | 8 | import subprocess |
9 | import tempfile | 9 | import tempfile |
10 | import unittest | ||
11 | 10 | ||
12 | from oeqa.sdk.case import OESDKTestCase | 11 | from oeqa.sdk.cases.meson import MesonTestBase |
12 | |||
13 | from oeqa.utils.subprocesstweak import errors_have_output | 13 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | errors_have_output() | 14 | errors_have_output() |
15 | 15 | ||
16 | class GTK3Test(OESDKTestCase): | 16 | class GTK3Test(MesonTestBase): |
17 | |||
18 | def setUp(self): | ||
19 | super().setUp() | ||
20 | self.ensure_target_package("gtk+3", "libgtk-3.0", recipe="gtk+3") | ||
21 | self.ensure_host_package("glib-2.0-utils", "libglib-2.0-utils", recipe="glib-2.0") | ||
22 | |||
17 | """ | 23 | """ |
18 | Test that autotools and GTK+ 3 compiles correctly. | 24 | Test that autotools and GTK+ 3 compiles correctly. |
19 | """ | 25 | """ |
20 | def setUp(self): | 26 | def test_libhandy(self): |
21 | if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \ | 27 | with tempfile.TemporaryDirectory(prefix="libhandy", dir=self.tc.sdk_dir) as testdir: |
22 | self.tc.hasTargetPackage("libgtk-3.0", multilib=True)): | 28 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://download.gnome.org/sources/libhandy/1.8/libhandy-1.8.3.tar.xz") |
23 | raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3") | ||
24 | if not (self.tc.hasHostPackage("nativesdk-gettext-dev") or | ||
25 | self.tc.hasHostPackage("gettext-native")): | ||
26 | raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain gettext") | ||
27 | |||
28 | def test_galculator(self): | ||
29 | with tempfile.TemporaryDirectory(prefix="galculator", dir=self.tc.sdk_dir) as testdir: | ||
30 | tarball = self.fetch(testdir, self.td["DL_DIR"], "http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2") | ||
31 | |||
32 | dirs = {} | ||
33 | dirs["source"] = os.path.join(testdir, "galculator-2.1.4") | ||
34 | dirs["build"] = os.path.join(testdir, "build") | ||
35 | dirs["install"] = os.path.join(testdir, "install") | ||
36 | 29 | ||
37 | subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) | 30 | sourcedir = os.path.join(testdir, "libhandy-1.8.3") |
38 | self.assertTrue(os.path.isdir(dirs["source"])) | 31 | builddir = os.path.join(testdir, "build") |
39 | os.makedirs(dirs["build"]) | 32 | installdir = os.path.join(testdir, "install") |
40 | 33 | ||
41 | self._run("cd {source} && sed -i -e '/s_preferences.*prefs;/d' src/main.c && autoreconf -i -f -I $OECORE_TARGET_SYSROOT/usr/share/aclocal -I m4".format(**dirs)) | 34 | subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) |
42 | self._run("cd {build} && {source}/configure $CONFIGURE_FLAGS".format(**dirs)) | 35 | self.assertTrue(os.path.isdir(sourcedir)) |
43 | self._run("cd {build} && make -j".format(**dirs)) | 36 | os.makedirs(builddir) |
44 | self._run("cd {build} && make install DESTDIR={install}".format(**dirs)) | ||
45 | 37 | ||
46 | self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "galculator")) | 38 | self.build_meson(sourcedir, builddir, installdir, "-Dglade_catalog=disabled -Dintrospection=disabled -Dvapi=false") |
39 | self.assertTrue(os.path.isdir(installdir)) | ||
40 | self.check_elf(os.path.join(installdir, "usr", "local", "lib", "libhandy-1.so")) | ||
diff --git a/meta/lib/oeqa/sdk/cases/kmod.py b/meta/lib/oeqa/sdk/cases/kmod.py new file mode 100644 index 0000000000..0aa6f702e4 --- /dev/null +++ b/meta/lib/oeqa/sdk/cases/kmod.py | |||
@@ -0,0 +1,39 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import os | ||
8 | import subprocess | ||
9 | import tempfile | ||
10 | |||
11 | from oeqa.sdk.case import OESDKTestCase | ||
12 | from oeqa.sdkext.context import OESDKExtTestContext | ||
13 | from oeqa.utils.subprocesstweak import errors_have_output | ||
14 | errors_have_output() | ||
15 | |||
16 | class KernelModuleTest(OESDKTestCase): | ||
17 | """ | ||
18 | Test that out-of-tree kernel modules build. | ||
19 | """ | ||
20 | def test_cryptodev(self): | ||
21 | if isinstance(self.tc, OESDKExtTestContext): | ||
22 | self.skipTest(f"{self.id()} does not support eSDK (https://bugzilla.yoctoproject.org/show_bug.cgi?id=15850)") | ||
23 | |||
24 | self.ensure_target_package("kernel-devsrc") | ||
25 | # These targets need to be built before kernel modules can be built. | ||
26 | self._run("make -j -C $OECORE_TARGET_SYSROOT/usr/src/kernel prepare scripts") | ||
27 | |||
28 | with tempfile.TemporaryDirectory(prefix="cryptodev", dir=self.tc.sdk_dir) as testdir: | ||
29 | git_url = "https://github.com/cryptodev-linux/cryptodev-linux" | ||
30 | # This is a known-good commit post-1.13 that builds with kernel 6.7+ | ||
31 | git_sha = "bb8bc7cf60d2c0b097c8b3b0e807f805b577a53f" | ||
32 | |||
33 | sourcedir = os.path.join(testdir, "cryptodev-linux") | ||
34 | subprocess.check_output(["git", "clone", git_url, sourcedir], stderr=subprocess.STDOUT) | ||
35 | self.assertTrue(os.path.isdir(sourcedir)) | ||
36 | subprocess.check_output(["git", "-C", sourcedir, "checkout", git_sha], stderr=subprocess.STDOUT) | ||
37 | |||
38 | self._run("make -C %s V=1 KERNEL_DIR=$OECORE_TARGET_SYSROOT/usr/src/kernel" % sourcedir) | ||
39 | self.check_elf(os.path.join(sourcedir, "cryptodev.ko")) | ||
diff --git a/meta/lib/oeqa/sdk/cases/makefile.py b/meta/lib/oeqa/sdk/cases/makefile.py index 2ff54ce25f..e1e2484820 100644 --- a/meta/lib/oeqa/sdk/cases/makefile.py +++ b/meta/lib/oeqa/sdk/cases/makefile.py | |||
@@ -5,6 +5,7 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | import os, tempfile, subprocess | 7 | import os, tempfile, subprocess |
8 | import unittest | ||
8 | from oeqa.sdk.case import OESDKTestCase | 9 | from oeqa.sdk.case import OESDKTestCase |
9 | from oeqa.utils.subprocesstweak import errors_have_output | 10 | from oeqa.utils.subprocesstweak import errors_have_output |
10 | errors_have_output() | 11 | errors_have_output() |
@@ -13,6 +14,11 @@ class MakefileTest(OESDKTestCase): | |||
13 | """ | 14 | """ |
14 | Test that "plain" compilation works, using just $CC $CFLAGS etc. | 15 | Test that "plain" compilation works, using just $CC $CFLAGS etc. |
15 | """ | 16 | """ |
17 | def setUp(self): | ||
18 | libc = self.td.get("TCLIBC") | ||
19 | if libc in [ 'newlib' ]: | ||
20 | raise unittest.SkipTest("MakefileTest class: SDK doesn't contain a supported C library") | ||
21 | |||
16 | def test_lzip(self): | 22 | def test_lzip(self): |
17 | with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir: | 23 | with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir: |
18 | tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz") | 24 | tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz") |
diff --git a/meta/lib/oeqa/sdk/cases/manifest.py b/meta/lib/oeqa/sdk/cases/manifest.py new file mode 100644 index 0000000000..ee59a5f338 --- /dev/null +++ b/meta/lib/oeqa/sdk/cases/manifest.py | |||
@@ -0,0 +1,26 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.sdk.case import OESDKTestCase | ||
8 | from oeqa.sdkext.context import OESDKExtTestContext | ||
9 | |||
10 | |||
11 | class ManifestTest(OESDKTestCase): | ||
12 | def test_manifests(self): | ||
13 | """ | ||
14 | Verify that the host and target manifests are not empty, unless this is | ||
15 | a minimal eSDK without toolchain in which case they should be empty. | ||
16 | """ | ||
17 | if ( | ||
18 | isinstance(self.tc, OESDKExtTestContext) | ||
19 | and self.td.get("SDK_EXT_TYPE") == "minimal" | ||
20 | and self.td.get("SDK_INCLUDE_TOOLCHAIN") == "0" | ||
21 | ): | ||
22 | self.assertEqual(self.tc.target_pkg_manifest, {}) | ||
23 | self.assertEqual(self.tc.host_pkg_manifest, {}) | ||
24 | else: | ||
25 | self.assertNotEqual(self.tc.target_pkg_manifest, {}) | ||
26 | self.assertNotEqual(self.tc.host_pkg_manifest, {}) | ||
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py index 20f6b553d0..e3e8edc781 100644 --- a/meta/lib/oeqa/sdk/cases/maturin.py +++ b/meta/lib/oeqa/sdk/cases/maturin.py | |||
@@ -16,44 +16,24 @@ errors_have_output() | |||
16 | 16 | ||
17 | class MaturinTest(OESDKTestCase): | 17 | class MaturinTest(OESDKTestCase): |
18 | def setUp(self): | 18 | def setUp(self): |
19 | if not ( | 19 | self.ensure_host_package("python3-maturin") |
20 | self.tc.hasHostPackage("nativesdk-python3-maturin") | ||
21 | or self.tc.hasHostPackage("python3-maturin-native") | ||
22 | ): | ||
23 | raise unittest.SkipTest("No python3-maturin package in the SDK") | ||
24 | 20 | ||
25 | def test_maturin_list_python(self): | 21 | def test_maturin_list_python(self): |
26 | py_major = self._run("python3 -c 'import sys; print(sys.version_info.major)'") | 22 | out = self._run(r"""python3 -c 'import sys; print(f"{sys.executable}\n{sys.version_info.major}.{sys.version_info.minor}")'""") |
27 | py_minor = self._run("python3 -c 'import sys; print(sys.version_info.minor)'") | 23 | executable, version = out.splitlines() |
28 | python_version = "%s.%s" % (py_major.strip(), py_minor.strip()) | ||
29 | cmd = "maturin list-python" | ||
30 | output = self._run(cmd) | ||
31 | self.assertRegex(output, r"^🐍 1 python interpreter found:\n") | ||
32 | self.assertRegex( | ||
33 | output, | ||
34 | r" - CPython %s (.+)/usr/bin/python%s$" % (python_version, python_version), | ||
35 | ) | ||
36 | 24 | ||
25 | output = self._run("maturin list-python") | ||
26 | # The output looks like this: | ||
27 | # - CPython 3.13 at /usr/bin/python3 | ||
28 | # We don't want to assume CPython so just check for the version and path. | ||
29 | expected = f"{version} at {executable}" | ||
30 | self.assertIn(expected, output) | ||
37 | 31 | ||
38 | class MaturinDevelopTest(OESDKTestCase): | 32 | class MaturinDevelopTest(OESDKTestCase): |
39 | @classmethod | ||
40 | def setUpClass(self): | ||
41 | targetdir = os.path.join(self.tc.sdk_dir, "guessing-game") | ||
42 | try: | ||
43 | shutil.rmtree(targetdir) | ||
44 | except FileNotFoundError: | ||
45 | pass | ||
46 | shutil.copytree( | ||
47 | os.path.join(self.tc.files_dir, "maturin/guessing-game"), targetdir | ||
48 | ) | ||
49 | |||
50 | def setUp(self): | 33 | def setUp(self): |
51 | machine = self.td.get("MACHINE") | 34 | machine = self.td.get("MACHINE") |
52 | if not ( | 35 | self.ensure_host_package("python3-maturin") |
53 | self.tc.hasHostPackage("nativesdk-python3-maturin") | 36 | |
54 | or self.tc.hasHostPackage("python3-maturin-native") | ||
55 | ): | ||
56 | raise unittest.SkipTest("No python3-maturin package in the SDK") | ||
57 | if not ( | 37 | if not ( |
58 | self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine) | 38 | self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine) |
59 | ): | 39 | ): |
@@ -67,9 +47,17 @@ class MaturinDevelopTest(OESDKTestCase): | |||
67 | (1) that a .venv can be created. | 47 | (1) that a .venv can be created. |
68 | (2) a functional 'rustc' and 'cargo' | 48 | (2) a functional 'rustc' and 'cargo' |
69 | """ | 49 | """ |
70 | self._run("cd %s/guessing-game; python3 -m venv .venv" % self.tc.sdk_dir) | 50 | targetdir = os.path.join(self.tc.sdk_dir, "guessing-game") |
71 | cmd = "cd %s/guessing-game; maturin develop" % self.tc.sdk_dir | 51 | try: |
72 | output = self._run(cmd) | 52 | shutil.rmtree(targetdir) |
53 | except FileNotFoundError: | ||
54 | pass | ||
55 | shutil.copytree( | ||
56 | os.path.join(self.tc.files_dir, "maturin/guessing-game"), targetdir | ||
57 | ) | ||
58 | |||
59 | self._run("cd %s; python3 -m venv .venv" % targetdir) | ||
60 | output = self._run("cd %s; maturin develop" % targetdir) | ||
73 | self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8") | 61 | self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8") |
74 | self.assertRegex(output, r"🐍 Not using a specific python interpreter") | 62 | self.assertRegex(output, r"🐍 Not using a specific python interpreter") |
75 | self.assertRegex(output, r"📡 Using build options features from pyproject.toml") | 63 | self.assertRegex(output, r"📡 Using build options features from pyproject.toml") |
diff --git a/meta/lib/oeqa/sdk/cases/meson.py b/meta/lib/oeqa/sdk/cases/meson.py index be53df204a..a809ca3a53 100644 --- a/meta/lib/oeqa/sdk/cases/meson.py +++ b/meta/lib/oeqa/sdk/cases/meson.py | |||
@@ -4,41 +4,69 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import json | ||
7 | import os | 8 | import os |
8 | import subprocess | 9 | import subprocess |
9 | import tempfile | 10 | import tempfile |
10 | import unittest | 11 | import unittest |
11 | 12 | ||
12 | from oeqa.sdk.case import OESDKTestCase | 13 | from oeqa.sdk.case import OESDKTestCase |
14 | from oeqa.sdkext.context import OESDKExtTestContext | ||
13 | from oeqa.utils.subprocesstweak import errors_have_output | 15 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | errors_have_output() | 16 | errors_have_output() |
15 | 17 | ||
16 | class MesonTest(OESDKTestCase): | 18 | class MesonTestBase(OESDKTestCase): |
19 | def setUp(self): | ||
20 | libc = self.td.get("TCLIBC") | ||
21 | if libc in [ 'newlib' ]: | ||
22 | raise unittest.SkipTest("MesonTest class: SDK doesn't contain a supported C library") | ||
23 | |||
24 | if isinstance(self.tc, OESDKExtTestContext): | ||
25 | self.skipTest(f"{self.id()} does not support eSDK (https://bugzilla.yoctoproject.org/show_bug.cgi?id=15854)") | ||
26 | |||
27 | self.ensure_host_package("meson") | ||
28 | self.ensure_host_package("pkgconfig") | ||
29 | |||
30 | def build_meson(self, sourcedir, builddir, installdir=None, options=""): | ||
31 | """ | ||
32 | Given a source tree in sourcedir, configure it to build in builddir with | ||
33 | the specified options, and if installdir is set also install. | ||
34 | """ | ||
35 | log = self._run(f"meson setup --warnlevel 1 {builddir} {sourcedir} {options}") | ||
36 | |||
37 | # Check that Meson thinks we're doing a cross build and not a native | ||
38 | self.assertIn("Build type: cross build", log) | ||
39 | |||
40 | # Check that the cross-compiler used is the one we set. | ||
41 | data = json.loads(self._run(f"meson introspect --compilers {builddir}")) | ||
42 | self.assertIn(self.td.get("CC").split()[0], data["host"]["c"]["exelist"]) | ||
43 | |||
44 | # Check that the target architectures was set correctly. | ||
45 | data = json.loads(self._run(f"meson introspect --machines {builddir}")) | ||
46 | self.assertEqual(data["host"]["cpu"], self.td["HOST_ARCH"]) | ||
47 | |||
48 | self._run(f"meson compile -C {builddir} -v") | ||
49 | |||
50 | if installdir: | ||
51 | self._run(f"meson install -C {builddir} --destdir {installdir}") | ||
52 | |||
53 | class MesonTest(MesonTestBase): | ||
17 | """ | 54 | """ |
18 | Test that Meson builds correctly. | 55 | Test that Meson builds correctly. |
19 | """ | 56 | """ |
20 | def setUp(self): | ||
21 | if not (self.tc.hasHostPackage("nativesdk-meson") or | ||
22 | self.tc.hasHostPackage("meson-native")): | ||
23 | raise unittest.SkipTest("MesonTest: needs meson") | ||
24 | 57 | ||
25 | def test_epoxy(self): | 58 | def test_epoxy(self): |
26 | with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: | 59 | with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: |
27 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/anholt/libepoxy/releases/download/1.5.3/libepoxy-1.5.3.tar.xz") | 60 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/anholt/libepoxy/releases/download/1.5.3/libepoxy-1.5.3.tar.xz") |
28 | 61 | ||
29 | dirs = {} | 62 | sourcedir = os.path.join(testdir, "libepoxy-1.5.3") |
30 | dirs["source"] = os.path.join(testdir, "libepoxy-1.5.3") | 63 | builddir = os.path.join(testdir, "build") |
31 | dirs["build"] = os.path.join(testdir, "build") | 64 | installdir = os.path.join(testdir, "install") |
32 | dirs["install"] = os.path.join(testdir, "install") | ||
33 | 65 | ||
34 | subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) | 66 | subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) |
35 | self.assertTrue(os.path.isdir(dirs["source"])) | 67 | self.assertTrue(os.path.isdir(sourcedir)) |
36 | os.makedirs(dirs["build"]) | ||
37 | |||
38 | log = self._run("meson --warnlevel 1 -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs)) | ||
39 | # Check that Meson thinks we're doing a cross build and not a native | ||
40 | self.assertIn("Build type: cross build", log) | ||
41 | self._run("ninja -C {build} -v".format(**dirs)) | ||
42 | self._run("DESTDIR={install} ninja -C {build} -v install".format(**dirs)) | ||
43 | 68 | ||
44 | self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libepoxy.so")) | 69 | os.makedirs(builddir) |
70 | self.build_meson(sourcedir, builddir, installdir, "-Degl=no -Dglx=no -Dx11=false") | ||
71 | self.assertTrue(os.path.isdir(installdir)) | ||
72 | self.check_elf(os.path.join(installdir, "usr", "local", "lib", "libepoxy.so")) | ||
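With the shared base class, a new Meson-based SDK test reduces to fetch, unpack, and build_meson(). A hypothetical subclass (project name and paths illustrative):

    import os
    import tempfile

    from oeqa.sdk.cases.meson import MesonTestBase

    class ExampleMesonTest(MesonTestBase):
        def test_example(self):
            with tempfile.TemporaryDirectory(dir=self.tc.sdk_dir) as testdir:
                # Assume the project source has already been unpacked here.
                sourcedir = os.path.join(testdir, "example-1.0")
                builddir = os.path.join(testdir, "build")
                installdir = os.path.join(testdir, "install")
                # build_meson() asserts this is a cross build, checks the
                # compiler and host CPU via introspection, then compiles
                # and installs into installdir.
                self.build_meson(sourcedir, builddir, installdir)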
diff --git a/meta/lib/oeqa/sdk/cases/perl.py b/meta/lib/oeqa/sdk/cases/perl.py index 8eab4442e8..a72bd2461a 100644 --- a/meta/lib/oeqa/sdk/cases/perl.py +++ b/meta/lib/oeqa/sdk/cases/perl.py | |||
@@ -4,7 +4,6 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import unittest | ||
8 | from oeqa.sdk.case import OESDKTestCase | 7 | from oeqa.sdk.case import OESDKTestCase |
9 | 8 | ||
10 | from oeqa.utils.subprocesstweak import errors_have_output | 9 | from oeqa.utils.subprocesstweak import errors_have_output |
@@ -12,9 +11,7 @@ errors_have_output() | |||
12 | 11 | ||
13 | class PerlTest(OESDKTestCase): | 12 | class PerlTest(OESDKTestCase): |
14 | def setUp(self): | 13 | def setUp(self): |
15 | if not (self.tc.hasHostPackage("nativesdk-perl") or | 14 | self.ensure_host_package("perl") |
16 | self.tc.hasHostPackage("perl-native")): | ||
17 | raise unittest.SkipTest("No perl package in the SDK") | ||
18 | 15 | ||
19 | def test_perl(self): | 16 | def test_perl(self): |
20 | cmd = "perl -e '$_=\"Uryyb, jbeyq\"; tr/a-zA-Z/n-za-mN-ZA-M/;print'" | 17 | cmd = "perl -e '$_=\"Uryyb, jbeyq\"; tr/a-zA-Z/n-za-mN-ZA-M/;print'" |
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py index 51284949f5..b990cd889a 100644 --- a/meta/lib/oeqa/sdk/cases/python.py +++ b/meta/lib/oeqa/sdk/cases/python.py | |||
@@ -4,7 +4,6 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import unittest | ||
8 | from oeqa.sdk.case import OESDKTestCase | 7 | from oeqa.sdk.case import OESDKTestCase |
9 | 8 | ||
10 | from oeqa.utils.subprocesstweak import errors_have_output | 9 | from oeqa.utils.subprocesstweak import errors_have_output |
@@ -12,9 +11,7 @@ errors_have_output() | |||
12 | 11 | ||
13 | class Python3Test(OESDKTestCase): | 12 | class Python3Test(OESDKTestCase): |
14 | def setUp(self): | 13 | def setUp(self): |
15 | if not (self.tc.hasHostPackage("nativesdk-python3-core") or | 14 | self.ensure_host_package("python3-core", recipe="python3") |
16 | self.tc.hasHostPackage("python3-core-native")): | ||
17 | raise unittest.SkipTest("No python3 package in the SDK") | ||
18 | 15 | ||
19 | def test_python3(self): | 16 | def test_python3(self): |
20 | cmd = "python3 -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\"" | 17 | cmd = "python3 -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\"" |
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py index a54245851b..4b115bebf5 100644 --- a/meta/lib/oeqa/sdk/cases/rust.py +++ b/meta/lib/oeqa/sdk/cases/rust.py | |||
@@ -31,6 +31,7 @@ class RustCompileTest(OESDKTestCase): | |||
31 | raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain") | 31 | raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain") |
32 | 32 | ||
33 | def test_cargo_build(self): | 33 | def test_cargo_build(self): |
34 | self._run('cd %s/hello; cargo add zstd' % (self.tc.sdk_dir)) | ||
34 | self._run('cd %s/hello; cargo build' % self.tc.sdk_dir) | 35 | self._run('cd %s/hello; cargo build' % self.tc.sdk_dir) |
35 | 36 | ||
36 | class RustHostCompileTest(OESDKTestCase): | 37 | class RustHostCompileTest(OESDKTestCase): |
@@ -52,5 +53,6 @@ class RustHostCompileTest(OESDKTestCase): | |||
52 | 53 | ||
53 | def test_cargo_build(self): | 54 | def test_cargo_build(self): |
54 | sdksys = self.td.get("SDK_SYS") | 55 | sdksys = self.td.get("SDK_SYS") |
56 | self._run('cd %s/hello; cargo add zstd' % (self.tc.sdk_dir)) | ||
55 | self._run('cd %s/hello; cargo build --target %s-gnu' % (self.tc.sdk_dir, sdksys)) | 57 | self._run('cd %s/hello; cargo build --target %s-gnu' % (self.tc.sdk_dir, sdksys)) |
56 | self._run('cd %s/hello; cargo run --target %s-gnu' % (self.tc.sdk_dir, sdksys)) | 58 | self._run('cd %s/hello; cargo run --target %s-gnu' % (self.tc.sdk_dir, sdksys)) |
diff --git a/meta/lib/oeqa/sdk/context.py b/meta/lib/oeqa/sdk/context.py index 01c38c24e6..d4fdd83207 100644 --- a/meta/lib/oeqa/sdk/context.py +++ b/meta/lib/oeqa/sdk/context.py | |||
@@ -23,6 +23,13 @@ class OESDKTestContext(OETestContext): | |||
23 | self.target_pkg_manifest = target_pkg_manifest | 23 | self.target_pkg_manifest = target_pkg_manifest |
24 | self.host_pkg_manifest = host_pkg_manifest | 24 | self.host_pkg_manifest = host_pkg_manifest |
25 | 25 | ||
26 | # match multilib according to sdk_env | ||
27 | self.multilib = "" | ||
28 | multilibs = self.td.get('MULTILIB_VARIANTS', '').split() | ||
29 | for ml in multilibs: | ||
30 | if ml in os.path.basename(self.sdk_env): | ||
31 | self.multilib = ml | ||
32 | |||
26 | def _hasPackage(self, manifest, pkg, regex=False): | 33 | def _hasPackage(self, manifest, pkg, regex=False): |
27 | if regex: | 34 | if regex: |
28 | # do regex match | 35 | # do regex match |
@@ -40,12 +47,8 @@ class OESDKTestContext(OETestContext): | |||
40 | return self._hasPackage(self.host_pkg_manifest, pkg, regex=regex) | 47 | return self._hasPackage(self.host_pkg_manifest, pkg, regex=regex) |
41 | 48 | ||
42 | def hasTargetPackage(self, pkg, multilib=False, regex=False): | 49 | def hasTargetPackage(self, pkg, multilib=False, regex=False): |
43 | if multilib: | 50 | if multilib and self.multilib: |
44 | # match multilib according to sdk_env | 51 | pkg = self.multilib + '-' + pkg |
45 | mls = self.td.get('MULTILIB_VARIANTS', '').split() | ||
46 | for ml in mls: | ||
47 | if ('ml'+ml) in self.sdk_env: | ||
48 | pkg = ml + '-' + pkg | ||
49 | return self._hasPackage(self.target_pkg_manifest, pkg, regex=regex) | 52 | return self._hasPackage(self.target_pkg_manifest, pkg, regex=regex) |
50 | 53 | ||
51 | class OESDKTestContextExecutor(OETestContextExecutor): | 54 | class OESDKTestContextExecutor(OETestContextExecutor): |
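The context change computes the multilib variant once, at construction time, by checking whether each MULTILIB_VARIANTS entry occurs in the basename of the environment-setup script; hasTargetPackage then simply prefixes the package name whenever a variant was found. A small illustration with made-up values:

    import os

    # Hypothetical values: a lib32 environment-setup script and typical variants
    sdk_env = "/opt/sdk/environment-setup-armv8a-pokymllib32-linux"
    multilibs = "lib32 lib64".split()       # MULTILIB_VARIANTS

    multilib = ""
    for ml in multilibs:
        if ml in os.path.basename(sdk_env):
            multilib = ml                   # -> "lib32"

    # hasTargetPackage("gcc", multilib=True) then queries the manifest for:
    pkg = (multilib + "-" if multilib else "") + "gcc"   # "lib32-gcc"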
diff --git a/meta/lib/oeqa/sdk/testsdk.py b/meta/lib/oeqa/sdk/testsdk.py index 518b09febb..cffcf9f49a 100644 --- a/meta/lib/oeqa/sdk/testsdk.py +++ b/meta/lib/oeqa/sdk/testsdk.py | |||
@@ -31,6 +31,28 @@ class TestSDK(TestSDKBase): | |||
31 | context_class = OESDKTestContext | 31 | context_class = OESDKTestContext |
32 | test_type = 'sdk' | 32 | test_type = 'sdk' |
33 | 33 | ||
34 | def sdk_dir_names(self, d): | ||
35 | """Return list from TESTSDK_CASE_DIRS.""" | ||
36 | testdirs = d.getVar("TESTSDK_CASE_DIRS") | ||
37 | if testdirs: | ||
38 | return testdirs.split() | ||
39 | |||
40 | bb.fatal("TESTSDK_CASE_DIRS unset, can't find SDK test directories.") | ||
41 | |||
42 | def get_sdk_paths(self, d): | ||
43 | """ | ||
44 | Return a list of paths where SDK test cases reside. | ||
45 | |||
46 | SDK tests are expected in <LAYER_DIR>/lib/oeqa/<dirname>/cases | ||
47 | """ | ||
48 | paths = [] | ||
49 | for layer in d.getVar("BBLAYERS").split(): | ||
50 | for dirname in self.sdk_dir_names(d): | ||
51 | case_path = os.path.join(layer, "lib", "oeqa", dirname, "cases") | ||
52 | if os.path.isdir(case_path): | ||
53 | paths.append(case_path) | ||
54 | return paths | ||
55 | |||
34 | def get_tcname(self, d): | 56 | def get_tcname(self, d): |
35 | """ | 57 | """ |
36 | Get the name of the SDK file | 58 | Get the name of the SDK file |
@@ -114,7 +136,8 @@ class TestSDK(TestSDKBase): | |||
114 | host_pkg_manifest=host_pkg_manifest, **context_args) | 136 | host_pkg_manifest=host_pkg_manifest, **context_args) |
115 | 137 | ||
116 | try: | 138 | try: |
117 | tc.loadTests(self.context_executor_class.default_cases) | 139 | modules = (d.getVar("TESTSDK_SUITES") or "").split() |
140 | tc.loadTests(self.get_sdk_paths(d), modules) | ||
118 | except Exception as e: | 141 | except Exception as e: |
119 | import traceback | 142 | import traceback |
120 | bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) | 143 | bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) |
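With this change, SDK test discovery is layer-driven: every layer in BBLAYERS is searched for a lib/oeqa/&lt;dirname&gt;/cases directory for each name in TESTSDK_CASE_DIRS, and TESTSDK_SUITES (when set) limits which modules are loaded. A quick illustration of the path composition with hypothetical values:

    import os

    bblayers = "/srv/poky/meta /srv/meta-custom".split()  # BBLAYERS (illustrative)
    case_dirs = ["sdk"]                                   # TESTSDK_CASE_DIRS

    paths = []
    for layer in bblayers:
        for dirname in case_dirs:
            case_path = os.path.join(layer, "lib", "oeqa", dirname, "cases")
            if os.path.isdir(case_path):   # only existing case dirs are kept
                paths.append(case_path)
    # e.g. ["/srv/poky/meta/lib/oeqa/sdk/cases"] for a stock checkout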
diff --git a/meta/lib/oeqa/sdkext/context.py b/meta/lib/oeqa/sdkext/context.py index 2ac2bf6ff7..2da57e2ccf 100644 --- a/meta/lib/oeqa/sdkext/context.py +++ b/meta/lib/oeqa/sdkext/context.py | |||
@@ -12,11 +12,11 @@ class OESDKExtTestContext(OESDKTestContext): | |||
12 | 12 | ||
13 | # FIXME - We really need to do better mapping of names here, this at | 13 | # FIXME - We really need to do better mapping of names here, this at |
14 | # least allows some tests to run | 14 | # least allows some tests to run |
15 | def hasHostPackage(self, pkg): | 15 | def hasHostPackage(self, pkg, regex=False): |
16 | # We force a toolchain to be installed into the eSDK even if it's minimal | 16 | # We force a toolchain to be installed into the eSDK even if it's minimal |
17 | if pkg.startswith("packagegroup-cross-canadian-"): | 17 | if pkg.startswith("packagegroup-cross-canadian-"): |
18 | return True | 18 | return True |
19 | return self._hasPackage(self.host_pkg_manifest, pkg) | 19 | return self._hasPackage(self.host_pkg_manifest, pkg, regex) |
20 | 20 | ||
21 | class OESDKExtTestContextExecutor(OESDKTestContextExecutor): | 21 | class OESDKExtTestContextExecutor(OESDKTestContextExecutor): |
22 | _context_class = OESDKExtTestContext | 22 | _context_class = OESDKExtTestContext |
diff --git a/meta/lib/oeqa/sdkext/testsdk.py b/meta/lib/oeqa/sdkext/testsdk.py index 9d5a99d900..6dc23065a4 100644 --- a/meta/lib/oeqa/sdkext/testsdk.py +++ b/meta/lib/oeqa/sdkext/testsdk.py | |||
@@ -82,7 +82,8 @@ class TestSDKExt(TestSDKBase): | |||
82 | host_pkg_manifest=host_pkg_manifest) | 82 | host_pkg_manifest=host_pkg_manifest) |
83 | 83 | ||
84 | try: | 84 | try: |
85 | tc.loadTests(OESDKExtTestContextExecutor.default_cases) | 85 | modules = (d.getVar("TESTSDK_SUITES") or "").split() |
86 | tc.loadTests(OESDKExtTestContextExecutor.default_cases, modules) | ||
86 | except Exception as e: | 87 | except Exception as e: |
87 | import traceback | 88 | import traceback |
88 | bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) | 89 | bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) |
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py index 3cb888c506..612ec675a7 100644 --- a/meta/lib/oeqa/selftest/cases/archiver.py +++ b/meta/lib/oeqa/selftest/cases/archiver.py | |||
@@ -190,28 +190,28 @@ class Archiver(OESelftestTestCase): | |||
190 | Test that the archiver works with `ARCHIVER_MODE[src] = "original"`. | 190 | Test that the archiver works with `ARCHIVER_MODE[src] = "original"`. |
191 | """ | 191 | """ |
192 | 192 | ||
193 | self._test_archiver_mode('original', 'ed-1.14.1.tar.lz') | 193 | self._test_archiver_mode('original', 'ed-1.21.1.tar.lz') |
194 | 194 | ||
195 | def test_archiver_mode_patched(self): | 195 | def test_archiver_mode_patched(self): |
196 | """ | 196 | """ |
197 | Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. | 197 | Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. |
198 | """ | 198 | """ |
199 | 199 | ||
200 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.xz') | 200 | self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-patched.tar.xz') |
201 | 201 | ||
202 | def test_archiver_mode_configured(self): | 202 | def test_archiver_mode_configured(self): |
203 | """ | 203 | """ |
204 | Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. | 204 | Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. |
205 | """ | 205 | """ |
206 | 206 | ||
207 | self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.xz') | 207 | self._test_archiver_mode('configured', 'selftest-ed-native-1.21.1-r0-configured.tar.xz') |
208 | 208 | ||
209 | def test_archiver_mode_recipe(self): | 209 | def test_archiver_mode_recipe(self): |
210 | """ | 210 | """ |
211 | Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. | 211 | Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. |
212 | """ | 212 | """ |
213 | 213 | ||
214 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.xz', | 214 | self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-recipe.tar.xz', |
215 | 'ARCHIVER_MODE[recipe] = "1"\n') | 215 | 'ARCHIVER_MODE[recipe] = "1"\n') |
216 | 216 | ||
217 | def test_archiver_mode_diff(self): | 217 | def test_archiver_mode_diff(self): |
@@ -220,7 +220,7 @@ class Archiver(OESelftestTestCase): | |||
220 | Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested. | 220 | Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested. |
221 | """ | 221 | """ |
222 | 222 | ||
223 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-diff.gz', | 223 | self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-diff.gz', |
224 | 'ARCHIVER_MODE[diff] = "1"\n') | 224 | 'ARCHIVER_MODE[diff] = "1"\n') |
225 | 225 | ||
226 | def test_archiver_mode_dumpdata(self): | 226 | def test_archiver_mode_dumpdata(self): |
@@ -228,7 +228,7 @@ class Archiver(OESelftestTestCase): | |||
228 | Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`. | 228 | Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`. |
229 | """ | 229 | """ |
230 | 230 | ||
231 | self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-showdata.dump', | 231 | self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-showdata.dump', |
232 | 'ARCHIVER_MODE[dumpdata] = "1"\n') | 232 | 'ARCHIVER_MODE[dumpdata] = "1"\n') |
233 | 233 | ||
234 | def test_archiver_mode_mirror(self): | 234 | def test_archiver_mode_mirror(self): |
@@ -236,7 +236,7 @@ class Archiver(OESelftestTestCase): | |||
236 | Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`. | 236 | Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`. |
237 | """ | 237 | """ |
238 | 238 | ||
239 | self._test_archiver_mode('mirror', 'ed-1.14.1.tar.lz', | 239 | self._test_archiver_mode('mirror', 'ed-1.21.1.tar.lz', |
240 | 'BB_GENERATE_MIRROR_TARBALLS = "1"\n') | 240 | 'BB_GENERATE_MIRROR_TARBALLS = "1"\n') |
241 | 241 | ||
242 | def test_archiver_mode_mirror_excludes(self): | 242 | def test_archiver_mode_mirror_excludes(self): |
@@ -247,7 +247,7 @@ class Archiver(OESelftestTestCase): | |||
247 | """ | 247 | """ |
248 | 248 | ||
249 | target='selftest-ed' | 249 | target='selftest-ed' |
250 | target_file_name = 'ed-1.14.1.tar.lz' | 250 | target_file_name = 'ed-1.21.1.tar.lz' |
251 | 251 | ||
252 | features = 'INHERIT += "archiver"\n' | 252 | features = 'INHERIT += "archiver"\n' |
253 | features += 'ARCHIVER_MODE[src] = "mirror"\n' | 253 | features += 'ARCHIVER_MODE[src] = "mirror"\n' |
@@ -285,7 +285,7 @@ class Archiver(OESelftestTestCase): | |||
285 | bitbake('-c deploy_archives %s' % (target)) | 285 | bitbake('-c deploy_archives %s' % (target)) |
286 | 286 | ||
287 | bb_vars = get_bb_vars(['DEPLOY_DIR_SRC']) | 287 | bb_vars = get_bb_vars(['DEPLOY_DIR_SRC']) |
288 | for target_file_name in ['ed-1.14.1.tar.lz', 'hello.c']: | 288 | for target_file_name in ['ed-1.21.1.tar.lz', 'hello.c']: |
289 | glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name) | 289 | glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name) |
290 | glob_result = glob.glob(glob_str) | 290 | glob_result = glob.glob(glob_str) |
291 | self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name)) | 291 | self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name)) |
diff --git a/meta/lib/oeqa/selftest/cases/barebox.py b/meta/lib/oeqa/selftest/cases/barebox.py new file mode 100644 index 0000000000..3f8f232432 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/barebox.py | |||
@@ -0,0 +1,44 @@ | |||
1 | # Qemu-based barebox bootloader integration testing | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | # | ||
7 | |||
8 | from oeqa.selftest.case import OESelftestTestCase | ||
9 | from oeqa.utils.commands import bitbake, runqemu | ||
10 | from oeqa.core.decorator.data import skipIfNotArch | ||
11 | from oeqa.core.decorator import OETestTag | ||
12 | |||
13 | barebox_boot_patterns = { | ||
14 | 'search_reached_prompt': r"stop autoboot", | ||
15 | 'search_login_succeeded': r"barebox@[^:]+:[^ ]+ ", | ||
16 | 'search_cmd_finished': r"barebox@[a-zA-Z0-9\-\s]+:/" | ||
17 | } | ||
18 | |||
19 | |||
20 | class BareboxTest(OESelftestTestCase): | ||
21 | |||
22 | @skipIfNotArch(['arm', 'aarch64']) | ||
23 | @OETestTag("runqemu") | ||
24 | def test_boot_barebox(self): | ||
25 | """ | ||
26 | Tests building barebox and booting it with QEMU | ||
27 | """ | ||
28 | |||
29 | self.write_config(""" | ||
30 | QB_DEFAULT_KERNEL = "barebox-dt-2nd.img" | ||
31 | PREFERRED_PROVIDER_virtual/bootloader = "barebox" | ||
32 | QEMU_USE_KVM = "False" | ||
33 | """) | ||
34 | |||
35 | bitbake("virtual/bootloader core-image-minimal") | ||
36 | |||
37 | with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic', | ||
38 | boot_patterns=barebox_boot_patterns) as qemu: | ||
39 | |||
40 | # test if barebox console works | ||
41 | cmd = "version" | ||
42 | status, output = qemu.run_serial(cmd) | ||
43 | self.assertEqual(status, 1, msg=output) | ||
44 | self.assertTrue("barebox" in output, msg=output) | ||
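The boot_patterns dictionary overrides the regexes the qemu runner uses to track console state: "stop autoboot" stands in for reaching the prompt, and the barebox@...:/ prompt serves as both the login and command-completion marker. Note that run_serial follows the oeqa convention of returning status 1 on success, which is why the test asserts equality with 1. A quick sanity check of the prompt patterns against a plausible barebox prompt line (the sample text is illustrative):

    import re

    sample = "barebox@ARM QEMU virt64:/ "
    assert re.search(r"barebox@[^:]+:[^ ]+ ", sample)
    assert re.search(r"barebox@[a-zA-Z0-9\-\s]+:/", sample)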
diff --git a/meta/lib/oeqa/selftest/cases/bbclasses.py b/meta/lib/oeqa/selftest/cases/bbclasses.py new file mode 100644 index 0000000000..10545ebe65 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/bbclasses.py | |||
@@ -0,0 +1,106 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import get_bb_vars, bitbake | ||
9 | |||
10 | class Systemd(OESelftestTestCase): | ||
11 | """ | ||
12 | Tests related to the systemd bbclass. | ||
13 | """ | ||
14 | |||
15 | def getVars(self, recipe): | ||
16 | self.bb_vars = get_bb_vars( | ||
17 | [ | ||
18 | 'BPN', | ||
19 | 'D', | ||
20 | 'INIT_D_DIR', | ||
21 | 'prefix', | ||
22 | 'systemd_system_unitdir', | ||
23 | 'sysconfdir', | ||
24 | ], | ||
25 | recipe, | ||
26 | ) | ||
27 | |||
28 | def fileExists(self, filename): | ||
29 | self.assertExists(filename.format(**self.bb_vars)) | ||
30 | |||
31 | def fileNotExists(self, filename): | ||
32 | self.assertNotExists(filename.format(**self.bb_vars)) | ||
33 | |||
34 | def test_systemd_in_distro(self): | ||
35 | """ | ||
36 | Summary: Verify that no sysvinit files are installed when the | ||
37 | systemd distro feature is enabled, but sysvinit is not. | ||
38 | Expected: Systemd service file exists, but /etc does not. | ||
39 | Product: OE-Core | ||
40 | Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com> | ||
41 | """ | ||
42 | |||
43 | self.write_config(""" | ||
44 | DISTRO_FEATURES:append = " systemd usrmerge" | ||
45 | DISTRO_FEATURES:remove = "sysvinit" | ||
46 | VIRTUAL-RUNTIME_init_manager = "systemd" | ||
47 | """) | ||
48 | bitbake("systemd-only systemd-and-sysvinit -c install") | ||
49 | |||
50 | self.getVars("systemd-only") | ||
51 | self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service") | ||
52 | |||
53 | self.getVars("systemd-and-sysvinit") | ||
54 | self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service") | ||
55 | self.fileNotExists("{D}{sysconfdir}") | ||
56 | |||
57 | def test_systemd_and_sysvinit_in_distro(self): | ||
58 | """ | ||
59 | Summary: Verify that both systemd and sysvinit files are installed | ||
60 | when both the systemd and sysvinit distro features are | ||
61 | enabled. | ||
62 | Expected: Systemd service file and sysvinit initscript exist. | ||
63 | Product: OE-Core | ||
64 | Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com> | ||
65 | """ | ||
66 | |||
67 | self.write_config(""" | ||
68 | DISTRO_FEATURES:append = " systemd sysvinit usrmerge" | ||
69 | VIRTUAL-RUNTIME_init_manager = "systemd" | ||
70 | """) | ||
71 | bitbake("systemd-only systemd-and-sysvinit -c install") | ||
72 | |||
73 | self.getVars("systemd-only") | ||
74 | self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service") | ||
75 | |||
76 | self.getVars("systemd-and-sysvinit") | ||
77 | self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service") | ||
78 | self.fileExists("{D}{INIT_D_DIR}/{BPN}") | ||
79 | |||
80 | def test_sysvinit_in_distro(self): | ||
81 | """ | ||
82 | Summary: Verify that no systemd service files are installed when the | ||
83 | sysvinit distro feature is enabled, but systemd is not. | ||
84 | Expected: The systemd service file does not exist, nor does /usr. | ||
85 | The sysvinit initscript exists. | ||
86 | Product: OE-Core | ||
87 | Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com> | ||
88 | """ | ||
89 | |||
90 | self.write_config(""" | ||
91 | DISTRO_FEATURES:remove = "systemd" | ||
92 | DISTRO_FEATURES:append = " sysvinit usrmerge" | ||
93 | VIRTUAL-RUNTIME_init_manager = "sysvinit" | ||
94 | """) | ||
95 | bitbake("systemd-only systemd-and-sysvinit -c install") | ||
96 | |||
97 | self.getVars("systemd-only") | ||
98 | self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service") | ||
99 | self.fileNotExists("{D}{prefix}") | ||
100 | self.fileNotExists("{D}{sysconfdir}") | ||
101 | self.fileExists("{D}") | ||
102 | |||
103 | self.getVars("systemd-and-sysvinit") | ||
104 | self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service") | ||
105 | self.fileNotExists("{D}{prefix}") | ||
106 | self.fileExists("{D}{INIT_D_DIR}/{BPN}") | ||
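The fileExists/fileNotExists helpers above keep the assertions symbolic: each path template names BitBake variables as format fields, and getVars fills them in per recipe. For example, with illustrative values:

    # Illustrative bb_vars, as getVars("systemd-only") might collect them
    bb_vars = {
        "BPN": "systemd-only",
        "D": "/work/systemd-only/1.0/image",
        "systemd_system_unitdir": "/usr/lib/systemd/system",
    }
    path = "{D}{systemd_system_unitdir}/{BPN}.service".format(**bb_vars)
    # -> /work/systemd-only/1.0/image/usr/lib/systemd/system/systemd-only.service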
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py index 695d17377d..68b0377720 100644 --- a/meta/lib/oeqa/selftest/cases/bblayers.py +++ b/meta/lib/oeqa/selftest/cases/bblayers.py | |||
@@ -240,3 +240,34 @@ class BitbakeLayers(OESelftestTestCase): | |||
240 | self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2)) | 240 | self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2)) |
241 | self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2)) | 241 | self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2)) |
242 | self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2)) | 242 | self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2)) |
243 | |||
244 | class BitbakeConfigBuild(OESelftestTestCase): | ||
245 | def test_enable_disable_fragments(self): | ||
246 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None) | ||
247 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None) | ||
248 | |||
249 | runCmd('bitbake-config-build enable-fragment selftest/test-fragment') | ||
250 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), 'somevalue') | ||
251 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None) | ||
252 | |||
253 | runCmd('bitbake-config-build enable-fragment selftest/more-fragments-here/test-another-fragment') | ||
254 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), 'somevalue') | ||
255 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), 'someothervalue') | ||
256 | |||
257 | fragment_metadata_command = "bitbake-getvar -f {} --value {}" | ||
258 | result = runCmd(fragment_metadata_command.format("selftest/test-fragment", "BB_CONF_FRAGMENT_SUMMARY")) | ||
259 | self.assertIn("This is a configuration fragment intended for testing in oe-selftest context", result.output) | ||
260 | result = runCmd(fragment_metadata_command.format("selftest/test-fragment", "BB_CONF_FRAGMENT_DESCRIPTION")) | ||
261 | self.assertIn("It defines a variable that can be checked inside the test.", result.output) | ||
262 | result = runCmd(fragment_metadata_command.format("selftest/more-fragments-here/test-another-fragment", "BB_CONF_FRAGMENT_SUMMARY")) | ||
263 | self.assertIn("This is a second configuration fragment intended for testing in oe-selftest context", result.output) | ||
264 | result = runCmd(fragment_metadata_command.format("selftest/more-fragments-here/test-another-fragment", "BB_CONF_FRAGMENT_DESCRIPTION")) | ||
265 | self.assertIn("It defines another variable that can be checked inside the test.", result.output) | ||
266 | |||
267 | runCmd('bitbake-config-build disable-fragment selftest/test-fragment') | ||
268 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None) | ||
269 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), 'someothervalue') | ||
270 | |||
271 | runCmd('bitbake-config-build disable-fragment selftest/more-fragments-here/test-another-fragment') | ||
272 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None) | ||
273 | self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None) | ||
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py index 98e9f81661..51934ef70d 100644 --- a/meta/lib/oeqa/selftest/cases/bbtests.py +++ b/meta/lib/oeqa/selftest/cases/bbtests.py | |||
@@ -233,6 +233,7 @@ INHERIT:remove = \"report-error\" | |||
233 | 233 | ||
234 | def test_non_gplv3(self): | 234 | def test_non_gplv3(self): |
235 | self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later" | 235 | self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later" |
236 | OVERRIDES .= ":gplv3test" | ||
236 | require conf/distro/include/no-gplv3.inc | 237 | require conf/distro/include/no-gplv3.inc |
237 | ''') | 238 | ''') |
238 | result = bitbake('selftest-ed', ignore_status=True) | 239 | result = bitbake('selftest-ed', ignore_status=True) |
@@ -241,7 +242,7 @@ require conf/distro/include/no-gplv3.inc | |||
241 | arch = get_bb_var('SSTATE_PKGARCH') | 242 | arch = get_bb_var('SSTATE_PKGARCH') |
242 | filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later') | 243 | filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later') |
243 | self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename) | 244 | self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename) |
244 | filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later') | 245 | filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-only') |
245 | self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename) | 246 | self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename) |
246 | 247 | ||
247 | def test_setscene_only(self): | 248 | def test_setscene_only(self): |
@@ -375,3 +376,21 @@ require conf/distro/include/no-gplv3.inc | |||
375 | self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path") | 376 | self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path") |
376 | self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution", | 377 | self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution", |
377 | result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output) | 378 | result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output) |
379 | |||
380 | def test_bb_env_bb_getvar_equality(self): | ||
381 | """ Test if "bitbake -e" output is identical to "bitbake-getvar" output for a variable set from an anonymous function | ||
382 | """ | ||
383 | self.write_config('''INHERIT += "test_anon_func" | ||
384 | TEST_SET_FROM_ANON_FUNC ?= ""''') | ||
385 | |||
386 | result_bb_e = runCmd('bitbake -e') | ||
387 | bb_e_var_match = re.search('^TEST_SET_FROM_ANON_FUNC="(?P<value>.*)"$', result_bb_e.output, re.MULTILINE) | ||
388 | self.assertTrue(bb_e_var_match, msg = "Can't find TEST_SET_FROM_ANON_FUNC value in \"bitbake -e\" output") | ||
389 | bb_e_var_value = bb_e_var_match.group("value") | ||
390 | |||
391 | result_bb_getvar = runCmd('bitbake-getvar TEST_SET_FROM_ANON_FUNC --value') | ||
392 | bb_getvar_var_value = result_bb_getvar.output.strip() | ||
393 | self.assertEqual(bb_e_var_value, bb_getvar_var_value, | ||
394 | msg='''"bitbake -e" output differs from bitbake-getvar output for TEST_SET_FROM_ANON_FUNC (set from anonymous function) | ||
395 | bitbake -e: "%s" | ||
396 | bitbake-getvar: "%s"''' % (bb_e_var_value, bb_getvar_var_value)) | ||
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py index 1688eabe4e..5ff263d342 100644 --- a/meta/lib/oeqa/selftest/cases/binutils.py +++ b/meta/lib/oeqa/selftest/cases/binutils.py | |||
@@ -33,7 +33,7 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase): | |||
33 | features.append('CHECK_TARGETS = "{0}"'.format(suite)) | 33 | features.append('CHECK_TARGETS = "{0}"'.format(suite)) |
34 | self.write_config("\n".join(features)) | 34 | self.write_config("\n".join(features)) |
35 | 35 | ||
36 | recipe = "binutils-cross-testsuite" | 36 | recipe = "binutils-testsuite" |
37 | bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) | 37 | bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) |
38 | builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] | 38 | builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] |
39 | 39 | ||
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py index 2d55994916..511c666554 100644 --- a/meta/lib/oeqa/selftest/cases/buildhistory.py +++ b/meta/lib/oeqa/selftest/cases/buildhistory.py | |||
@@ -9,10 +9,10 @@ import re | |||
9 | import datetime | 9 | import datetime |
10 | 10 | ||
11 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
12 | from oeqa.utils.commands import bitbake, get_bb_vars | 12 | from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runCmd |
13 | 13 | ||
14 | 14 | ||
15 | class BuildhistoryBase(OESelftestTestCase): | 15 | class BuildhistoryTests(OESelftestTestCase): |
16 | 16 | ||
17 | def config_buildhistory(self, tmp_bh_location=False): | 17 | def config_buildhistory(self, tmp_bh_location=False): |
18 | bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT']) | 18 | bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT']) |
@@ -48,5 +48,58 @@ class BuildhistoryBase(OESelftestTestCase): | |||
48 | else: | 48 | else: |
49 | self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output)) | 49 | self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output)) |
50 | 50 | ||
51 | # No tests should be added to the base class. | 51 | |
52 | # Please create a new class that inherits this one, or use one of those already available for adding tests. | 52 | def test_buildhistory_basic(self): |
53 | self.run_buildhistory_operation('xcursor-transparent-theme') | ||
54 | self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.") | ||
55 | |||
56 | def test_buildhistory_buildtime_pr_backwards(self): | ||
57 | target = 'xcursor-transparent-theme' | ||
58 | error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target | ||
59 | self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) | ||
60 | self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error) | ||
61 | |||
62 | def test_fileinfo(self): | ||
63 | self.config_buildhistory() | ||
64 | bitbake('hicolor-icon-theme') | ||
65 | history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme') | ||
66 | self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.') | ||
67 | |||
68 | def load_bh(f): | ||
69 | d = {} | ||
70 | for line in open(f): | ||
71 | split = [s.strip() for s in line.split('=', 1)] | ||
72 | if len(split) > 1: | ||
73 | d[split[0]] = split[1] | ||
74 | return d | ||
75 | |||
76 | data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest')) | ||
77 | self.assertIn('FILELIST', data) | ||
78 | self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme') | ||
79 | self.assertGreater(int(data['PKGSIZE']), 0) | ||
80 | |||
81 | data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest')) | ||
82 | if 'FILELIST' in data: | ||
83 | self.assertEqual(data['FILELIST'], '/usr/share/pkgconfig/default-icon-theme.pc') | ||
84 | self.assertGreater(int(data['PKGSIZE']), 0) | ||
85 | |||
86 | def test_buildhistory_diff(self): | ||
87 | target = 'xcursor-transparent-theme' | ||
88 | self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) | ||
89 | self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True) | ||
90 | result = runCmd("oe-pkgdata-util read-value PKGV %s" % target) | ||
91 | pkgv = result.output.rstrip() | ||
92 | result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR')) | ||
93 | expected_endlines = [ | ||
94 | "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv), | ||
95 | "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv) | ||
96 | ] | ||
97 | for line in result.output.splitlines(): | ||
98 | for el in expected_endlines: | ||
99 | if line.endswith(el): | ||
100 | expected_endlines.remove(el) | ||
101 | break | ||
102 | else: | ||
103 | self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines))) | ||
104 | if expected_endlines: | ||
105 | self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) \ No newline at end of file | ||
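The relocated tests read buildhistory's per-package "latest" files, which are flat "KEY = value" lines; load_bh above splits on the first '=' and strips whitespace. Roughly, for an illustrative latest file:

    latest = """\
    PV = 0.1.1
    PR = r0
    PKGSIZE = 4096
    FILELIST = /usr/share/icons/hicolor/index.theme
    """

    d = {}
    for line in latest.splitlines():
        split = [s.strip() for s in line.split('=', 1)]
        if len(split) > 1:
            d[split[0]] = split[1]

    assert d['FILELIST'] == '/usr/share/icons/hicolor/index.theme'
    assert int(d['PKGSIZE']) > 0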
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py index 31dafaa9c5..767e19bd88 100644 --- a/meta/lib/oeqa/selftest/cases/buildoptions.py +++ b/meta/lib/oeqa/selftest/cases/buildoptions.py | |||
@@ -10,7 +10,6 @@ import glob as g | |||
10 | import shutil | 10 | import shutil |
11 | import tempfile | 11 | import tempfile |
12 | from oeqa.selftest.case import OESelftestTestCase | 12 | from oeqa.selftest.case import OESelftestTestCase |
13 | from oeqa.selftest.cases.buildhistory import BuildhistoryBase | ||
14 | from oeqa.core.decorator.data import skipIfMachine | 13 | from oeqa.core.decorator.data import skipIfMachine |
15 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars | 14 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars |
16 | import oeqa.utils.ftools as ftools | 15 | import oeqa.utils.ftools as ftools |
@@ -84,7 +83,7 @@ class SanityOptionsTest(OESelftestTestCase): | |||
84 | 83 | ||
85 | self.write_config("INHERIT:remove = \"report-error\"") | 84 | self.write_config("INHERIT:remove = \"report-error\"") |
86 | if "packages-list" not in get_bb_var("ERROR_QA"): | 85 | if "packages-list" not in get_bb_var("ERROR_QA"): |
87 | self.append_config("ERROR_QA:append = \" packages-list\"") | 86 | self.append_config("ERROR_QA:append:pn-xcursor-transparent-theme = \" packages-list\"") |
88 | 87 | ||
89 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') | 88 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') |
90 | self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') | 89 | self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') |
@@ -94,8 +93,8 @@ class SanityOptionsTest(OESelftestTestCase): | |||
94 | self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) | 93 | self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) |
95 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) | 94 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) |
96 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') | 95 | self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') |
97 | self.append_config('ERROR_QA:remove = "packages-list"') | 96 | self.append_config('ERROR_QA:remove:pn-xcursor-transparent-theme = "packages-list"') |
98 | self.append_config('WARN_QA:append = " packages-list"') | 97 | self.append_config('WARN_QA:append:pn-xcursor-transparent-theme = " packages-list"') |
99 | res = bitbake("xcursor-transparent-theme -f -c package") | 98 | res = bitbake("xcursor-transparent-theme -f -c package") |
100 | self.delete_recipeinc('xcursor-transparent-theme') | 99 | self.delete_recipeinc('xcursor-transparent-theme') |
101 | line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") | 100 | line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") |
@@ -139,43 +138,6 @@ class SanityOptionsTest(OESelftestTestCase): | |||
139 | 138 | ||
140 | self.assertNotIn(err, ret.output) | 139 | self.assertNotIn(err, ret.output) |
141 | 140 | ||
142 | |||
143 | class BuildhistoryTests(BuildhistoryBase): | ||
144 | |||
145 | def test_buildhistory_basic(self): | ||
146 | self.run_buildhistory_operation('xcursor-transparent-theme') | ||
147 | self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.") | ||
148 | |||
149 | def test_buildhistory_buildtime_pr_backwards(self): | ||
150 | target = 'xcursor-transparent-theme' | ||
151 | error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target | ||
152 | self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) | ||
153 | self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error) | ||
154 | |||
155 | def test_fileinfo(self): | ||
156 | self.config_buildhistory() | ||
157 | bitbake('hicolor-icon-theme') | ||
158 | history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme') | ||
159 | self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.') | ||
160 | |||
161 | def load_bh(f): | ||
162 | d = {} | ||
163 | for line in open(f): | ||
164 | split = [s.strip() for s in line.split('=', 1)] | ||
165 | if len(split) > 1: | ||
166 | d[split[0]] = split[1] | ||
167 | return d | ||
168 | |||
169 | data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest')) | ||
170 | self.assertIn('FILELIST', data) | ||
171 | self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme') | ||
172 | self.assertGreater(int(data['PKGSIZE']), 0) | ||
173 | |||
174 | data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest')) | ||
175 | if 'FILELIST' in data: | ||
176 | self.assertEqual(data['FILELIST'], '') | ||
177 | self.assertEqual(int(data['PKGSIZE']), 0) | ||
178 | |||
179 | class ArchiverTest(OESelftestTestCase): | 141 | class ArchiverTest(OESelftestTestCase): |
180 | def test_arch_work_dir_and_export_source(self): | 142 | def test_arch_work_dir_and_export_source(self): |
181 | """ | 143 | """ |
@@ -229,11 +191,10 @@ PREMIRRORS = "\\ | |||
229 | https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n" | 191 | https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n" |
230 | """) | 192 | """) |
231 | 193 | ||
232 | bitbake("world --runall fetch") | 194 | bitbake("world --runall fetch --continue") |
233 | 195 | ||
234 | 196 | ||
235 | class Poisoning(OESelftestTestCase): | 197 | class Poisoning(OESelftestTestCase): |
236 | def test_poisoning(self): | 198 | def test_poisoning(self): |
237 | res = bitbake("poison", ignore_status=True) | 199 | # The poison recipe fails if the poisoning didn't work |
238 | self.assertNotEqual(res.status, 0) | 200 | bitbake("poison") |
239 | self.assertTrue("is unsafe for cross-compilation" in res.output) | ||
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py index 23c0a1408a..d1ac305a84 100644 --- a/meta/lib/oeqa/selftest/cases/containerimage.py +++ b/meta/lib/oeqa/selftest/cases/containerimage.py | |||
@@ -42,7 +42,6 @@ class ContainerImageTests(OESelftestTestCase): | |||
42 | self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy" | 42 | self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy" |
43 | IMAGE_FSTYPES = "container" | 43 | IMAGE_FSTYPES = "container" |
44 | PACKAGE_CLASSES = "package_ipk" | 44 | PACKAGE_CLASSES = "package_ipk" |
45 | IMAGE_FEATURES = "" | ||
46 | IMAGE_BUILDINFO_FILE = "" | 45 | IMAGE_BUILDINFO_FILE = "" |
47 | INIT_MANAGER = "sysvinit" | 46 | INIT_MANAGER = "sysvinit" |
48 | IMAGE_INSTALL:remove = "ssh-pregen-hostkeys" | 47 | IMAGE_INSTALL:remove = "ssh-pregen-hostkeys" |
@@ -55,8 +54,6 @@ IMAGE_INSTALL:remove = "ssh-pregen-hostkeys" | |||
55 | expected_files = [ | 54 | expected_files = [ |
56 | './', | 55 | './', |
57 | '.{bindir}/theapp', | 56 | '.{bindir}/theapp', |
58 | '.{sysconfdir}/default/', | ||
59 | '.{sysconfdir}/default/postinst', | ||
60 | '.{sysconfdir}/ld.so.cache', | 57 | '.{sysconfdir}/ld.so.cache', |
61 | '.{sysconfdir}/timestamp', | 58 | '.{sysconfdir}/timestamp', |
62 | '.{sysconfdir}/version', | 59 | '.{sysconfdir}/version', |
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py index 60cecd1328..511e4b81b4 100644 --- a/meta/lib/oeqa/selftest/cases/cve_check.py +++ b/meta/lib/oeqa/selftest/cases/cve_check.py | |||
@@ -72,6 +72,259 @@ class CVECheck(OESelftestTestCase): | |||
72 | self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8") | 72 | self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8") |
73 | self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31") | 73 | self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31") |
74 | 74 | ||
75 | def test_product_match(self): | ||
76 | from oe.cve_check import has_cve_product_match | ||
77 | |||
78 | status = {} | ||
79 | status["detail"] = "ignored" | ||
80 | status["vendor"] = "*" | ||
81 | status["product"] = "*" | ||
82 | status["description"] = "" | ||
83 | status["mapping"] = "" | ||
84 | |||
85 | self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), True) | ||
86 | self.assertEqual(has_cve_product_match(status, "*:*"), True) | ||
87 | self.assertEqual(has_cve_product_match(status, "some_product"), True) | ||
88 | self.assertEqual(has_cve_product_match(status, "glibc"), True) | ||
89 | self.assertEqual(has_cve_product_match(status, "glibca"), True) | ||
90 | self.assertEqual(has_cve_product_match(status, "aglibc"), True) | ||
91 | self.assertEqual(has_cve_product_match(status, "*"), True) | ||
92 | self.assertEqual(has_cve_product_match(status, "aglibc glibc test:test"), True) | ||
93 | |||
94 | status["product"] = "glibc" | ||
95 | self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False) | ||
96 | # The CPE in the recipe must be defined, no * accepted | ||
97 | self.assertEqual(has_cve_product_match(status, "*:*"), False) | ||
98 | self.assertEqual(has_cve_product_match(status, "*"), False) | ||
99 | self.assertEqual(has_cve_product_match(status, "some_product"), False) | ||
100 | self.assertEqual(has_cve_product_match(status, "glibc"), True) | ||
101 | self.assertEqual(has_cve_product_match(status, "glibca"), False) | ||
102 | self.assertEqual(has_cve_product_match(status, "aglibc"), False) | ||
103 | self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), True) | ||
104 | self.assertEqual(has_cve_product_match(status, "some_vendor:glibc test"), True) | ||
105 | self.assertEqual(has_cve_product_match(status, "test some_vendor:glibc"), True) | ||
106 | |||
107 | status["vendor"] = "glibca" | ||
108 | status["product"] = "glibc" | ||
109 | self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False) | ||
110 | # The CPE in the recipe must be defined, no * accepted | ||
111 | self.assertEqual(has_cve_product_match(status, "*:*"), False) | ||
112 | self.assertEqual(has_cve_product_match(status, "*"), False) | ||
113 | self.assertEqual(has_cve_product_match(status, "some_product"), False) | ||
114 | self.assertEqual(has_cve_product_match(status, "glibc"), False) | ||
115 | self.assertEqual(has_cve_product_match(status, "glibca"), False) | ||
116 | self.assertEqual(has_cve_product_match(status, "aglibc"), False) | ||
117 | self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), False) | ||
118 | self.assertEqual(has_cve_product_match(status, "glibca:glibc"), True) | ||
119 | self.assertEqual(has_cve_product_match(status, "test:test glibca:glibc"), True) | ||
120 | self.assertEqual(has_cve_product_match(status, "test glibca:glibc"), True) | ||
121 | self.assertEqual(has_cve_product_match(status, "glibca:glibc test"), True) | ||
122 | |||
123 | def test_parse_cve_from_patch_filename(self): | ||
124 | from oe.cve_check import parse_cve_from_filename | ||
125 | |||
126 | # Patch filename without CVE ID | ||
127 | self.assertEqual(parse_cve_from_filename("0001-test.patch"), "") | ||
128 | |||
129 | # Patch with single CVE ID | ||
130 | self.assertEqual( | ||
131 | parse_cve_from_filename("CVE-2022-12345.patch"), "CVE-2022-12345" | ||
132 | ) | ||
133 | |||
134 | # Patch with multiple CVE IDs | ||
135 | self.assertEqual( | ||
136 | parse_cve_from_filename("CVE-2022-41741-CVE-2022-41742.patch"), | ||
137 | "CVE-2022-41742", | ||
138 | ) | ||
139 | |||
140 | # Patches with CVE ID and appended text | ||
141 | self.assertEqual( | ||
142 | parse_cve_from_filename("CVE-2023-3019-0001.patch"), "CVE-2023-3019" | ||
143 | ) | ||
144 | self.assertEqual( | ||
145 | parse_cve_from_filename("CVE-2024-21886-1.patch"), "CVE-2024-21886" | ||
146 | ) | ||
147 | |||
148 | # Patch with CVE ID and prepended text | ||
149 | self.assertEqual( | ||
150 | parse_cve_from_filename("grep-CVE-2012-5667.patch"), "CVE-2012-5667" | ||
151 | ) | ||
152 | self.assertEqual( | ||
153 | parse_cve_from_filename("0001-CVE-2012-5667.patch"), "CVE-2012-5667" | ||
154 | ) | ||
155 | |||
156 | # Patch with CVE ID and both prepended and appended text | ||
157 | self.assertEqual( | ||
158 | parse_cve_from_filename( | ||
159 | "0001-tpm2_import-fix-fixed-AES-key-CVE-2021-3565-0001.patch" | ||
160 | ), | ||
161 | "CVE-2021-3565", | ||
162 | ) | ||
163 | |||
164 | # Only grab the last CVE ID in the filename | ||
165 | self.assertEqual( | ||
166 | parse_cve_from_filename("CVE-2012-5667-CVE-2012-5668.patch"), | ||
167 | "CVE-2012-5668", | ||
168 | ) | ||
169 | |||
170 | # Test invalid CVE ID with incorrect length (must be at least 4 digits) | ||
171 | self.assertEqual( | ||
172 | parse_cve_from_filename("CVE-2024-001.patch"), | ||
173 | "", | ||
174 | ) | ||
175 | |||
176 | # Test valid CVE ID with very long length | ||
177 | self.assertEqual( | ||
178 | parse_cve_from_filename("CVE-2024-0000000000000000000000001.patch"), | ||
179 | "CVE-2024-0000000000000000000000001", | ||
180 | ) | ||
181 | |||
182 | def test_parse_cve_from_patch_contents(self): | ||
183 | import textwrap | ||
184 | from oe.cve_check import parse_cves_from_patch_contents | ||
185 | |||
186 | # Standard patch file excerpt without any patches | ||
187 | self.assertEqual( | ||
188 | parse_cves_from_patch_contents( | ||
189 | textwrap.dedent("""\ | ||
190 | remove "*" for root since we don't have a /etc/shadow so far. | ||
191 | |||
192 | Upstream-Status: Inappropriate [configuration] | ||
193 | |||
194 | Signed-off-by: Scott Garman <scott.a.garman@intel.com> | ||
195 | |||
196 | --- base-passwd/passwd.master~nobash | ||
197 | +++ base-passwd/passwd.master | ||
198 | @@ -1,4 +1,4 @@ | ||
199 | -root:*:0:0:root:/root:/bin/sh | ||
200 | +root::0:0:root:/root:/bin/sh | ||
201 | daemon:*:1:1:daemon:/usr/sbin:/bin/sh | ||
202 | bin:*:2:2:bin:/bin:/bin/sh | ||
203 | sys:*:3:3:sys:/dev:/bin/sh | ||
204 | """) | ||
205 | ), | ||
206 | set(), | ||
207 | ) | ||
208 | |||
209 | # Patch file with multiple CVE IDs (space-separated) | ||
210 | self.assertEqual( | ||
211 | parse_cves_from_patch_contents( | ||
212 | textwrap.dedent("""\ | ||
213 | There is an assertion in function _cairo_arc_in_direction(). | ||
214 | |||
215 | CVE: CVE-2019-6461 CVE-2019-6462 | ||
216 | Upstream-Status: Pending | ||
217 | Signed-off-by: Ross Burton <ross.burton@intel.com> | ||
218 | |||
219 | diff --git a/src/cairo-arc.c b/src/cairo-arc.c | ||
220 | index 390397bae..1bde774a4 100644 | ||
221 | --- a/src/cairo-arc.c | ||
222 | +++ b/src/cairo-arc.c | ||
223 | @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr, | ||
224 | if (cairo_status (cr)) | ||
225 | return; | ||
226 | |||
227 | - assert (angle_max >= angle_min); | ||
228 | + if (angle_max < angle_min) | ||
229 | + return; | ||
230 | |||
231 | if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) { | ||
232 | angle_max = fmod (angle_max - angle_min, 2 * M_PI); | ||
233 | """), | ||
234 | ), | ||
235 | {"CVE-2019-6461", "CVE-2019-6462"}, | ||
236 | ) | ||
237 | |||
238 | # Patch file with multiple CVE IDs (comma-separated w/ both space and no space) | ||
239 | self.assertEqual( | ||
240 | parse_cves_from_patch_contents( | ||
241 | textwrap.dedent("""\ | ||
242 | There is an assertion in function _cairo_arc_in_direction(). | ||
243 | |||
244 | CVE: CVE-2019-6461,CVE-2019-6462, CVE-2019-6463 | ||
245 | Upstream-Status: Pending | ||
246 | Signed-off-by: Ross Burton <ross.burton@intel.com> | ||
247 | |||
248 | diff --git a/src/cairo-arc.c b/src/cairo-arc.c | ||
249 | index 390397bae..1bde774a4 100644 | ||
250 | --- a/src/cairo-arc.c | ||
251 | +++ b/src/cairo-arc.c | ||
252 | @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr, | ||
253 | if (cairo_status (cr)) | ||
254 | return; | ||
255 | |||
256 | - assert (angle_max >= angle_min); | ||
257 | + if (angle_max < angle_min) | ||
258 | + return; | ||
259 | |||
260 | if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) { | ||
261 | angle_max = fmod (angle_max - angle_min, 2 * M_PI); | ||
262 | |||
263 | """), | ||
264 | ), | ||
265 | {"CVE-2019-6461", "CVE-2019-6462", "CVE-2019-6463"}, | ||
266 | ) | ||
267 | |||
268 | # Patch file with multiple CVE IDs (&-separated) | ||
269 | self.assertEqual( | ||
270 | parse_cves_from_patch_contents( | ||
271 | textwrap.dedent("""\ | ||
272 | There is an assertion in function _cairo_arc_in_direction(). | ||
273 | |||
274 | CVE: CVE-2019-6461 & CVE-2019-6462 | ||
275 | Upstream-Status: Pending | ||
276 | Signed-off-by: Ross Burton <ross.burton@intel.com> | ||
277 | |||
278 | diff --git a/src/cairo-arc.c b/src/cairo-arc.c | ||
279 | index 390397bae..1bde774a4 100644 | ||
280 | --- a/src/cairo-arc.c | ||
281 | +++ b/src/cairo-arc.c | ||
282 | @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr, | ||
283 | if (cairo_status (cr)) | ||
284 | return; | ||
285 | |||
286 | - assert (angle_max >= angle_min); | ||
287 | + if (angle_max < angle_min) | ||
288 | + return; | ||
289 | |||
290 | if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) { | ||
291 | angle_max = fmod (angle_max - angle_min, 2 * M_PI); | ||
292 | """), | ||
293 | ), | ||
294 | {"CVE-2019-6461", "CVE-2019-6462"}, | ||
295 | ) | ||
296 | |||
297 | # Patch file with multiple lines with CVE IDs | ||
298 | self.assertEqual( | ||
299 | parse_cves_from_patch_contents( | ||
300 | textwrap.dedent("""\ | ||
301 | There is an assertion in function _cairo_arc_in_direction(). | ||
302 | |||
303 | CVE: CVE-2019-6461 & CVE-2019-6462 | ||
304 | |||
305 | CVE: CVE-2019-6463 & CVE-2019-6464 | ||
306 | Upstream-Status: Pending | ||
307 | Signed-off-by: Ross Burton <ross.burton@intel.com> | ||
308 | |||
309 | diff --git a/src/cairo-arc.c b/src/cairo-arc.c | ||
310 | index 390397bae..1bde774a4 100644 | ||
311 | --- a/src/cairo-arc.c | ||
312 | +++ b/src/cairo-arc.c | ||
313 | @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr, | ||
314 | if (cairo_status (cr)) | ||
315 | return; | ||
316 | |||
317 | - assert (angle_max >= angle_min); | ||
318 | + if (angle_max < angle_min) | ||
319 | + return; | ||
320 | |||
321 | if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) { | ||
322 | angle_max = fmod (angle_max - angle_min, 2 * M_PI); | ||
323 | |||
324 | """), | ||
325 | ), | ||
326 | {"CVE-2019-6461", "CVE-2019-6462", "CVE-2019-6463", "CVE-2019-6464"}, | ||
327 | ) | ||
75 | 328 | ||
76 | def test_recipe_report_json(self): | 329 | def test_recipe_report_json(self): |
77 | config = """ | 330 | config = """ |
@@ -217,9 +470,10 @@ CVE_CHECK_REPORT_PATCHED = "1" | |||
217 | # m4 CVE should not be in logrotate | 470 | # m4 CVE should not be in logrotate |
218 | self.assertNotIn("CVE-2008-1687", found_cves) | 471 | self.assertNotIn("CVE-2008-1687", found_cves) |
219 | # logrotate has both Patched and Ignored CVEs | 472 | # logrotate has both Patched and Ignored CVEs |
473 | detail = "version-not-in-range" | ||
220 | self.assertIn("CVE-2011-1098", found_cves) | 474 | self.assertIn("CVE-2011-1098", found_cves) |
221 | self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched") | 475 | self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched") |
222 | self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0) | 476 | self.assertEqual(found_cves["CVE-2011-1098"]["detail"], detail) |
223 | self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0) | 477 | self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0) |
224 | detail = "not-applicable-platform" | 478 | detail = "not-applicable-platform" |
225 | description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used" | 479 | description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used" |
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py index c8bf7d9e44..05f228f03e 100644 --- a/meta/lib/oeqa/selftest/cases/devtool.py +++ b/meta/lib/oeqa/selftest/cases/devtool.py | |||
@@ -64,11 +64,15 @@ def setUpModule(): | |||
64 | # under COREBASE and we don't want to copy that, so we have | 64 | # under COREBASE and we don't want to copy that, so we have |
65 | # to be selective. | 65 | # to be selective. |
66 | result = runCmd('git status --porcelain', cwd=oldreporoot) | 66 | result = runCmd('git status --porcelain', cwd=oldreporoot) |
67 | |||
68 | # Also copy modifications to the 'scripts/' directory | ||
69 | canonical_layerpath_scripts = os.path.normpath(canonical_layerpath + "../scripts") | ||
70 | |||
67 | for line in result.output.splitlines(): | 71 | for line in result.output.splitlines(): |
68 | if line.startswith(' M ') or line.startswith('?? '): | 72 | if line.startswith(' M ') or line.startswith('?? '): |
69 | relpth = line.split()[1] | 73 | relpth = line.split()[1] |
70 | pth = os.path.join(oldreporoot, relpth) | 74 | pth = os.path.join(oldreporoot, relpth) |
71 | if pth.startswith(canonical_layerpath): | 75 | if pth.startswith(canonical_layerpath) or pth.startswith(canonical_layerpath_scripts): |
72 | if relpth.endswith('/'): | 76 | if relpth.endswith('/'): |
73 | destdir = os.path.join(corecopydir, relpth) | 77 | destdir = os.path.join(corecopydir, relpth) |
74 | # avoid race condition by not copying .pyc files YPBZ#13421,13803 | 78 | # avoid race condition by not copying .pyc files YPBZ#13421,13803 |
@@ -150,7 +154,7 @@ class DevtoolTestCase(OESelftestTestCase): | |||
150 | value = invalue | 154 | value = invalue |
151 | invar = None | 155 | invar = None |
152 | elif '=' in line: | 156 | elif '=' in line: |
153 | splitline = line.split('=', 1) | 157 | splitline = re.split(r"[?+:]*=[+]?", line, 1) |
154 | var = splitline[0].rstrip() | 158 | var = splitline[0].rstrip() |
155 | value = splitline[1].strip().strip('"') | 159 | value = splitline[1].strip().strip('"') |
156 | if value.endswith('\\'): | 160 | if value.endswith('\\'): |
@@ -317,7 +321,7 @@ class DevtoolBase(DevtoolTestCase): | |||
317 | cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate | 321 | cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate |
318 | cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n' | 322 | cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n' |
319 | % cls.original_sstate) | 323 | % cls.original_sstate) |
320 | cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"\n') | 324 | cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"\n') |
321 | 325 | ||
322 | @classmethod | 326 | @classmethod |
323 | def tearDownClass(cls): | 327 | def tearDownClass(cls): |
@@ -465,7 +469,7 @@ class DevtoolAddTests(DevtoolBase): | |||
465 | checkvars = {} | 469 | checkvars = {} |
466 | checkvars['LICENSE'] = 'GPL-2.0-only' | 470 | checkvars['LICENSE'] = 'GPL-2.0-only' |
467 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' | 471 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' |
468 | checkvars['S'] = '${WORKDIR}/git' | 472 | checkvars['S'] = None |
469 | checkvars['PV'] = '0.1+git' | 473 | checkvars['PV'] = '0.1+git' |
470 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master' | 474 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master' |
471 | checkvars['SRCREV'] = srcrev | 475 | checkvars['SRCREV'] = srcrev |
@@ -561,7 +565,7 @@ class DevtoolAddTests(DevtoolBase): | |||
561 | recipefile = get_bb_var('FILE', testrecipe) | 565 | recipefile = get_bb_var('FILE', testrecipe) |
562 | self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') | 566 | self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') |
563 | checkvars = {} | 567 | checkvars = {} |
564 | checkvars['S'] = '${WORKDIR}/MarkupSafe-${PV}' | 568 | checkvars['S'] = '${UNPACKDIR}/MarkupSafe-${PV}' |
565 | checkvars['SRC_URI'] = url.replace(testver, '${PV}') | 569 | checkvars['SRC_URI'] = url.replace(testver, '${PV}') |
566 | self._test_recipe_contents(recipefile, checkvars, []) | 570 | self._test_recipe_contents(recipefile, checkvars, []) |
567 | # Try with version specified | 571 | # Try with version specified |
@@ -578,7 +582,7 @@ class DevtoolAddTests(DevtoolBase): | |||
578 | recipefile = get_bb_var('FILE', testrecipe) | 582 | recipefile = get_bb_var('FILE', testrecipe) |
579 | self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named') | 583 | self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named') |
580 | checkvars = {} | 584 | checkvars = {} |
581 | checkvars['S'] = '${WORKDIR}/MarkupSafe-%s' % testver | 585 | checkvars['S'] = '${UNPACKDIR}/MarkupSafe-%s' % testver |
582 | checkvars['SRC_URI'] = url | 586 | checkvars['SRC_URI'] = url |
583 | self._test_recipe_contents(recipefile, checkvars, []) | 587 | self._test_recipe_contents(recipefile, checkvars, []) |
584 | 588 | ||
@@ -605,7 +609,7 @@ class DevtoolAddTests(DevtoolBase): | |||
605 | recipefile = get_bb_var('FILE', testrecipe) | 609 | recipefile = get_bb_var('FILE', testrecipe) |
606 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') | 610 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') |
607 | checkvars = {} | 611 | checkvars = {} |
608 | checkvars['S'] = '${WORKDIR}/git' | 612 | checkvars['S'] = None |
609 | checkvars['PV'] = '1.0+git' | 613 | checkvars['PV'] = '1.0+git' |
610 | checkvars['SRC_URI'] = url_branch | 614 | checkvars['SRC_URI'] = url_branch |
611 | checkvars['SRCREV'] = '${AUTOREV}' | 615 | checkvars['SRCREV'] = '${AUTOREV}' |
@@ -624,7 +628,7 @@ class DevtoolAddTests(DevtoolBase): | |||
624 | recipefile = get_bb_var('FILE', testrecipe) | 628 | recipefile = get_bb_var('FILE', testrecipe) |
625 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') | 629 | self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') |
626 | checkvars = {} | 630 | checkvars = {} |
627 | checkvars['S'] = '${WORKDIR}/git' | 631 | checkvars['S'] = None |
628 | checkvars['PV'] = '1.5+git' | 632 | checkvars['PV'] = '1.5+git' |
629 | checkvars['SRC_URI'] = url_branch | 633 | checkvars['SRC_URI'] = url_branch |
630 | checkvars['SRCREV'] = checkrev | 634 | checkvars['SRCREV'] = checkrev |
@@ -753,6 +757,25 @@ class DevtoolModifyTests(DevtoolBase): | |||
753 | result = runCmd('devtool status') | 757 | result = runCmd('devtool status') |
754 | self.assertNotIn('mdadm', result.output) | 758 | self.assertNotIn('mdadm', result.output) |
755 | 759 | ||
760 | def test_devtool_modify_go(self): | ||
761 | import oe.path | ||
762 | from tempfile import TemporaryDirectory | ||
763 | with TemporaryDirectory(prefix='devtoolqa') as tempdir: | ||
764 | self.track_for_cleanup(self.workspacedir) | ||
765 | self.add_command_to_tearDown('bitbake -c clean go-helloworld') | ||
766 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
767 | result = runCmd('devtool modify go-helloworld -x %s' % tempdir) | ||
768 | self.assertExists( | ||
769 | oe.path.join(tempdir, 'src', 'golang.org', 'x', 'example', 'go.mod'), | ||
770 | 'Extracted source could not be found' | ||
771 | ) | ||
772 | self.assertExists( | ||
773 | oe.path.join(self.workspacedir, 'conf', 'layer.conf'), | ||
774 | 'Workspace directory not created' | ||
775 | ) | ||
776 | matches = glob.glob(oe.path.join(self.workspacedir, 'appends', 'go-helloworld_*.bbappend')) | ||
777 | self.assertTrue(matches, 'bbappend not created %s' % result.output) | ||
778 | |||
756 | def test_devtool_buildclean(self): | 779 | def test_devtool_buildclean(self): |
757 | def assertFile(path, *paths): | 780 | def assertFile(path, *paths): |
758 | f = os.path.join(path, *paths) | 781 | f = os.path.join(path, *paths) |
@@ -1004,7 +1027,7 @@ class DevtoolModifyTests(DevtoolBase): | |||
1004 | # Configure the recipe to check that the git dependencies are correctly patched in cargo config | 1027 | # Configure the recipe to check that the git dependencies are correctly patched in cargo config |
1005 | bitbake('-c configure %s' % testrecipe) | 1028 | bitbake('-c configure %s' % testrecipe) |
1006 | 1029 | ||
1007 | cargo_config_path = os.path.join(cargo_home, 'config') | 1030 | cargo_config_path = os.path.join(cargo_home, 'config.toml') |
1008 | with open(cargo_config_path, "r") as f: | 1031 | with open(cargo_config_path, "r") as f: |
1009 | cargo_config_contents = [line.strip('\n') for line in f.readlines()] | 1032 | cargo_config_contents = [line.strip('\n') for line in f.readlines()] |
1010 | 1033 | ||
@@ -1162,13 +1185,16 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1162 | result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir) | 1185 | result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir) |
1163 | result = runCmd('git add devtool-new-file', cwd=tempdir) | 1186 | result = runCmd('git add devtool-new-file', cwd=tempdir) |
1164 | result = runCmd('git commit -m "Add a new file"', cwd=tempdir) | 1187 | result = runCmd('git commit -m "Add a new file"', cwd=tempdir) |
1165 | self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) | 1188 | cleanup_cmd = 'cd %s; rm %s/*.patch; git add %s; git checkout %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)) |
1189 | self.add_command_to_tearDown(cleanup_cmd) | ||
1166 | result = runCmd('devtool update-recipe %s' % testrecipe) | 1190 | result = runCmd('devtool update-recipe %s' % testrecipe) |
1167 | result = runCmd('git add minicom', cwd=os.path.dirname(recipefile)) | 1191 | result = runCmd('git add minicom', cwd=os.path.dirname(recipefile)) |
1168 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | 1192 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), |
1169 | ('A ', '.*/0001-Change-the-README.patch$'), | 1193 | ('A ', '.*/0001-Change-the-README.patch$'), |
1170 | ('A ', '.*/0002-Add-a-new-file.patch$')] | 1194 | ('A ', '.*/0002-Add-a-new-file.patch$')] |
1171 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | 1195 | self._check_repo_status(os.path.dirname(recipefile), expected_status) |
1196 | result = runCmd(cleanup_cmd) | ||
1197 | self._check_repo_status(os.path.dirname(recipefile), []) | ||
1172 | 1198 | ||
1173 | def test_devtool_update_recipe_git(self): | 1199 | def test_devtool_update_recipe_git(self): |
1174 | # Check preconditions | 1200 | # Check preconditions |
@@ -1225,7 +1251,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1225 | 1251 | ||
1226 | def test_devtool_update_recipe_append(self): | 1252 | def test_devtool_update_recipe_append(self): |
1227 | # Check preconditions | 1253 | # Check preconditions |
1228 | testrecipe = 'mdadm' | 1254 | testrecipe = 'minicom' |
1229 | bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) | 1255 | bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) |
1230 | recipefile = bb_vars['FILE'] | 1256 | recipefile = bb_vars['FILE'] |
1231 | src_uri = bb_vars['SRC_URI'] | 1257 | src_uri = bb_vars['SRC_URI'] |
@@ -1243,7 +1269,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1243 | # Check git repo | 1269 | # Check git repo |
1244 | self._check_src_repo(tempsrcdir) | 1270 | self._check_src_repo(tempsrcdir) |
1245 | # Add a commit | 1271 | # Add a commit |
1246 | result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir) | 1272 | result = runCmd('echo "Additional line" >> README', cwd=tempsrcdir) |
1247 | result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) | 1273 | result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) |
1248 | self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe)) | 1274 | self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe)) |
1249 | # Create a temporary layer and add it to bblayers.conf | 1275 | # Create a temporary layer and add it to bblayers.conf |
@@ -1282,7 +1308,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1282 | self.assertEqual(expectedlines2, f.readlines()) | 1308 | self.assertEqual(expectedlines2, f.readlines()) |
1283 | # Put commit back and check we can run it if layer isn't in bblayers.conf | 1309 | # Put commit back and check we can run it if layer isn't in bblayers.conf |
1284 | os.remove(bbappendfile) | 1310 | os.remove(bbappendfile) |
1285 | result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir) | 1311 | result = runCmd('echo "Additional line" >> README', cwd=tempsrcdir) |
1286 | result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) | 1312 | result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) |
1287 | result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) | 1313 | result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) |
1288 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) | 1314 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) |
@@ -1601,12 +1627,12 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1601 | # Check preconditions | 1627 | # Check preconditions |
1602 | testrecipe = 'dos2unix' | 1628 | testrecipe = 'dos2unix' |
1603 | self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n') | 1629 | self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n') |
1604 | bb_vars = get_bb_vars(['SRC_URI', 'S', 'WORKDIR', 'FILE'], testrecipe) | 1630 | bb_vars = get_bb_vars(['SRC_URI', 'S', 'UNPACKDIR', 'FILE', 'BB_GIT_DEFAULT_DESTSUFFIX'], testrecipe) |
1605 | self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe) | 1631 | self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe) |
1606 | workdir_git = '%s/git/' % bb_vars['WORKDIR'] | 1632 | unpackdir_git = '%s/%s/' % (bb_vars['UNPACKDIR'], bb_vars['BB_GIT_DEFAULT_DESTSUFFIX']) |
1607 | if not bb_vars['S'].startswith(workdir_git): | 1633 | if not bb_vars['S'].startswith(unpackdir_git): |
1608 | self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe) | 1634 | self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe) |
1609 | subdir = bb_vars['S'].split(workdir_git, 1)[1] | 1635 | subdir = bb_vars['S'].split(unpackdir_git, 1)[1] |
1610 | # Clean up anything in the workdir/sysroot/sstate cache | 1636 | # Clean up anything in the workdir/sysroot/sstate cache |
1611 | bitbake('%s -c cleansstate' % testrecipe) | 1637 | bitbake('%s -c cleansstate' % testrecipe) |
1612 | # Try modifying a recipe | 1638 | # Try modifying a recipe |
@@ -1734,6 +1760,8 @@ class DevtoolExtractTests(DevtoolBase): | |||
1734 | self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found') | 1760 | self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found') |
1735 | self._check_src_repo(tempdir) | 1761 | self._check_src_repo(tempdir) |
1736 | 1762 | ||
1763 | class DevtoolResetTests(DevtoolBase): | ||
1764 | |||
1737 | def test_devtool_reset_all(self): | 1765 | def test_devtool_reset_all(self): |
1738 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | 1766 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') |
1739 | self.track_for_cleanup(tempdir) | 1767 | self.track_for_cleanup(tempdir) |
@@ -1760,6 +1788,21 @@ class DevtoolExtractTests(DevtoolBase): | |||
1760 | matches2 = glob.glob(stampprefix2 + '*') | 1788 | matches2 = glob.glob(stampprefix2 + '*') |
1761 | self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) | 1789 | self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) |
1762 | 1790 | ||
1791 | def test_devtool_reset_re_plus_plus(self): | ||
1792 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
1793 | self.track_for_cleanup(tempdir) | ||
1794 | self.track_for_cleanup(self.workspacedir) | ||
1795 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
1796 | testrecipe = 'devtool-test-reset-re++' | ||
1797 | result = runCmd('devtool modify %s' % testrecipe) | ||
1798 | result = runCmd('devtool reset -n %s' % testrecipe) | ||
1799 | self.assertIn(testrecipe, result.output) | ||
1800 | result = runCmd('devtool status') | ||
1801 | self.assertNotIn(testrecipe, result.output) | ||
1802 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', testrecipe), 'Recipe directory should not exist after resetting') | ||
1803 | |||
1804 | class DevtoolDeployTargetTests(DevtoolBase): | ||
1805 | |||
1763 | @OETestTag("runqemu") | 1806 | @OETestTag("runqemu") |
1764 | def test_devtool_deploy_target(self): | 1807 | def test_devtool_deploy_target(self): |
1765 | self._check_runqemu_prerequisites() | 1808 | self._check_runqemu_prerequisites() |
@@ -1827,6 +1870,8 @@ class DevtoolExtractTests(DevtoolBase): | |||
1827 | result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True) | 1870 | result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True) |
1828 | self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have') | 1871 | self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have') |
1829 | 1872 | ||
1873 | class DevtoolBuildImageTests(DevtoolBase): | ||
1874 | |||
1830 | def test_devtool_build_image(self): | 1875 | def test_devtool_build_image(self): |
1831 | """Test devtool build-image plugin""" | 1876 | """Test devtool build-image plugin""" |
1832 | # Check preconditions | 1877 | # Check preconditions |
@@ -1998,6 +2043,52 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
1998 | newlines = f.readlines() | 2043 | newlines = f.readlines() |
1999 | self.assertEqual(desiredlines, newlines) | 2044 | self.assertEqual(desiredlines, newlines) |
2000 | 2045 | ||
2046 | def test_devtool_upgrade_recipe_upgrade_extra_tasks(self): | ||
2047 | # Check preconditions | ||
2048 | self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') | ||
2049 | self.track_for_cleanup(self.workspacedir) | ||
2050 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
2051 | recipe = 'python3-guessing-game' | ||
2052 | version = '0.2.0' | ||
2053 | commit = '40cf004c2772ffa20ea803fa3be1528a75be3e98' | ||
2054 | oldrecipefile = get_bb_var('FILE', recipe) | ||
2055 | oldcratesincfile = os.path.join(os.path.dirname(oldrecipefile), os.path.basename(oldrecipefile).removesuffix('_git.bb') + '-crates.inc') | ||
2056 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | ||
2057 | self.track_for_cleanup(tempdir) | ||
2058 | # Check that recipe is not already under devtool control | ||
2059 | result = runCmd('devtool status') | ||
2060 | self.assertNotIn(recipe, result.output) | ||
2061 | # Check upgrade | ||
2062 | result = runCmd('devtool upgrade %s %s --version %s --srcrev %s' % (recipe, tempdir, version, commit)) | ||
2063 | # Check if srctree at least is populated | ||
2064 | self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, commit)) | ||
2065 | # Check new recipe file and new -crates.inc files are present | ||
2066 | newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldrecipefile)) | ||
2067 | newcratesincfile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldcratesincfile)) | ||
2068 | self.assertExists(newrecipefile, 'Recipe file should exist after upgrade') | ||
2069 | self.assertExists(newcratesincfile, 'Recipe crates.inc file should exist after upgrade') | ||
2070 | # Check devtool status and make sure recipe is present | ||
2071 | result = runCmd('devtool status') | ||
2072 | self.assertIn(recipe, result.output) | ||
2073 | self.assertIn(tempdir, result.output) | ||
2074 | # Check recipe got changed as expected | ||
2075 | with open(oldrecipefile + '.upgraded', 'r') as f: | ||
2076 | desiredlines = f.readlines() | ||
2077 | with open(newrecipefile, 'r') as f: | ||
2078 | newlines = f.readlines() | ||
2079 | self.assertEqual(desiredlines, newlines) | ||
2080 | # Check crates.inc got changed as expected | ||
2081 | with open(oldcratesincfile + '.upgraded', 'r') as f: | ||
2082 | desiredlines = f.readlines() | ||
2083 | with open(newcratesincfile, 'r') as f: | ||
2084 | newlines = f.readlines() | ||
2085 | self.assertEqual(desiredlines, newlines) | ||
2086 | # Check devtool reset recipe | ||
2087 | result = runCmd('devtool reset %s -n' % recipe) | ||
2088 | result = runCmd('devtool status') | ||
2089 | self.assertNotIn(recipe, result.output) | ||
2090 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting') | ||
2091 | |||
2001 | def test_devtool_layer_plugins(self): | 2092 | def test_devtool_layer_plugins(self): |
2002 | """Test that devtool can use plugins from other layers. | 2093 | """Test that devtool can use plugins from other layers. |
2003 | 2094 | ||
@@ -2323,7 +2414,7 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
2323 | newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename) | 2414 | newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename) |
2324 | self.assertExists(newsrctree, 'Source directory not renamed') | 2415 | self.assertExists(newsrctree, 'Source directory not renamed') |
2325 | checkvars = {} | 2416 | checkvars = {} |
2326 | checkvars['S'] = '${WORKDIR}/%s-%s' % (recipename, recipever) | 2417 | checkvars['S'] = '${UNPACKDIR}/%s-%s' % (recipename, recipever) |
2327 | checkvars['SRC_URI'] = url | 2418 | checkvars['SRC_URI'] = url |
2328 | self._test_recipe_contents(newrecipefile, checkvars, []) | 2419 | self._test_recipe_contents(newrecipefile, checkvars, []) |
2329 | # Try again - change just name this time | 2420 | # Try again - change just name this time |
@@ -2335,7 +2426,7 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
2335 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists') | 2426 | self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists') |
2336 | self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed') | 2427 | self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed') |
2337 | checkvars = {} | 2428 | checkvars = {} |
2338 | checkvars['S'] = '${WORKDIR}/%s-${PV}' % recipename | 2429 | checkvars['S'] = '${UNPACKDIR}/%s-${PV}' % recipename |
2339 | checkvars['SRC_URI'] = url.replace(recipever, '${PV}') | 2430 | checkvars['SRC_URI'] = url.replace(recipever, '${PV}') |
2340 | self._test_recipe_contents(newrecipefile, checkvars, []) | 2431 | self._test_recipe_contents(newrecipefile, checkvars, []) |
2341 | # Try again - change just version this time | 2432 | # Try again - change just version this time |
@@ -2346,7 +2437,7 @@ class DevtoolUpgradeTests(DevtoolBase): | |||
2346 | self.assertExists(newrecipefile, 'Recipe file not renamed') | 2437 | self.assertExists(newrecipefile, 'Recipe file not renamed') |
2347 | self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists') | 2438 | self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists') |
2348 | checkvars = {} | 2439 | checkvars = {} |
2349 | checkvars['S'] = '${WORKDIR}/${BPN}-%s' % recipever | 2440 | checkvars['S'] = '${UNPACKDIR}/${BPN}-%s' % recipever |
2350 | checkvars['SRC_URI'] = url | 2441 | checkvars['SRC_URI'] = url |
2351 | self._test_recipe_contents(newrecipefile, checkvars, []) | 2442 | self._test_recipe_contents(newrecipefile, checkvars, []) |
2352 | 2443 | ||
@@ -2468,7 +2559,7 @@ class DevtoolIdeSdkTests(DevtoolBase): | |||
2468 | self.track_for_cleanup(tempdir) | 2559 | self.track_for_cleanup(tempdir) |
2469 | self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name) | 2560 | self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name) |
2470 | 2561 | ||
2471 | result = runCmd('devtool modify %s -x %s' % (recipe_name, tempdir)) | 2562 | result = runCmd('devtool modify %s -x %s --debug-build' % (recipe_name, tempdir)) |
2472 | self.assertExists(os.path.join(tempdir, build_file), | 2563 | self.assertExists(os.path.join(tempdir, build_file), |
2473 | 'Extracted source could not be found') | 2564 | 'Extracted source could not be found') |
2474 | self.assertExists(os.path.join(self.workspacedir, 'conf', | 2565 | self.assertExists(os.path.join(self.workspacedir, 'conf', |
@@ -2520,11 +2611,6 @@ class DevtoolIdeSdkTests(DevtoolBase): | |||
2520 | i_and_d_script_path = os.path.join( | 2611 | i_and_d_script_path = os.path.join( |
2521 | self._workspace_scripts_dir(recipe_name), i_and_d_script) | 2612 | self._workspace_scripts_dir(recipe_name), i_and_d_script) |
2522 | self.assertExists(i_and_d_script_path) | 2613 | self.assertExists(i_and_d_script_path) |
2523 | del_script = "delete_package_dirs_" + recipe_id | ||
2524 | del_script_path = os.path.join( | ||
2525 | self._workspace_scripts_dir(recipe_name), del_script) | ||
2526 | self.assertExists(del_script_path) | ||
2527 | runCmd(del_script_path, cwd=tempdir) | ||
2528 | 2614 | ||
2529 | def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe): | 2615 | def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe): |
2530 | """Verify deployment and execution in Qemu system work for one recipe. | 2616 | """Verify deployment and execution in Qemu system work for one recipe. |
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py index ad952c004b..f2c6124d70 100644 --- a/meta/lib/oeqa/selftest/cases/distrodata.py +++ b/meta/lib/oeqa/selftest/cases/distrodata.py | |||
@@ -20,10 +20,10 @@ class Distrodata(OESelftestTestCase): | |||
20 | feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n' | 20 | feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n' |
21 | self.write_config(feature) | 21 | self.write_config(feature) |
22 | 22 | ||
23 | pkgs = oe.recipeutils.get_recipe_upgrade_status() | 23 | pkggroups = oe.recipeutils.get_recipe_upgrade_status() |
24 | 24 | ||
25 | regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN'] | 25 | regressed_failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'UNKNOWN_BROKEN'] |
26 | regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN'] | 26 | regressed_successes = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'KNOWN_BROKEN'] |
27 | msg = "" | 27 | msg = "" |
28 | if len(regressed_failures) > 0: | 28 | if len(regressed_failures) > 0: |
29 | msg = msg + """ | 29 | msg = msg + """ |
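The rewritten comprehensions assume get_recipe_upgrade_status() now returns groups of per-recipe dictionaries instead of a flat list of tuples. A minimal sketch of the flattening idiom, with invented sample data (only the 'pn' and 'status' keys are taken from the test):

    pkggroups = [
        [{'pn': 'foo', 'status': 'MATCH'}, {'pn': 'bar', 'status': 'UNKNOWN_BROKEN'}],
        [{'pn': 'baz', 'status': 'KNOWN_BROKEN'}],
    ]
    failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'UNKNOWN_BROKEN']
    assert failures == ['bar']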
@@ -55,8 +55,8 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re | |||
55 | return False | 55 | return False |
56 | 56 | ||
57 | def is_maintainer_exception(entry): | 57 | def is_maintainer_exception(entry): |
58 | exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data", | 58 | exceptions = ["musl", "newlib", "picolibc", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data", |
59 | "cve-update-nvd2-native",] | 59 | "cve-update-nvd2-native", "barebox", "libglvnd"] |
60 | for i in exceptions: | 60 | for i in exceptions: |
61 | if i in entry: | 61 | if i in entry: |
62 | return True | 62 | return True |
@@ -115,3 +115,15 @@ The list of oe-core recipes with maintainers is empty. This may indicate that th | |||
115 | self.fail(""" | 115 | self.fail(""" |
116 | Unable to find recipes for the following entries in maintainers.inc: | 116 | Unable to find recipes for the following entries in maintainers.inc: |
117 | """ + "\n".join(['%s' % i for i in missing_recipes])) | 117 | """ + "\n".join(['%s' % i for i in missing_recipes])) |
118 | |||
119 | def test_common_include_recipes(self): | ||
120 | """ | ||
121 | Summary: Test that obtaining recipes that share includes between them returns a sane result | ||
122 | Expected: At least cmake and qemu entries are present in the output | ||
123 | Product: oe-core | ||
124 | Author: Alexander Kanavin <alex.kanavin@gmail.com> | ||
125 | """ | ||
126 | recipes = oe.recipeutils.get_common_include_recipes() | ||
127 | |||
128 | self.assertIn({'qemu-system-native', 'qemu', 'qemu-native'}, recipes) | ||
129 | self.assertIn({'cmake-native', 'cmake'}, recipes) | ||
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py index fa74103dec..fcfcdaf7e4 100644 --- a/meta/lib/oeqa/selftest/cases/efibootpartition.py +++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py | |||
@@ -6,7 +6,7 @@ | |||
6 | # | 6 | # |
7 | 7 | ||
8 | from oeqa.selftest.case import OESelftestTestCase | 8 | from oeqa.selftest.case import OESelftestTestCase |
9 | from oeqa.utils.commands import bitbake, runqemu | 9 | from oeqa.utils.commands import bitbake, runqemu, get_bb_var |
10 | from oeqa.core.decorator.data import skipIfNotMachine | 10 | from oeqa.core.decorator.data import skipIfNotMachine |
11 | import oe.types | 11 | import oe.types |
12 | 12 | ||
@@ -14,17 +14,18 @@ class GenericEFITest(OESelftestTestCase): | |||
14 | """EFI booting test class""" | 14 | """EFI booting test class""" |
15 | @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently") | 15 | @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently") |
16 | def test_boot_efi(self): | 16 | def test_boot_efi(self): |
17 | cmd = "runqemu nographic serial wic ovmf" | 17 | image = "core-image-minimal" |
18 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or "" | ||
19 | cmd = "runqemu %s nographic serial wic ovmf" % (runqemu_params) | ||
18 | if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]): | 20 | if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]): |
19 | cmd += " kvm" | 21 | cmd += " kvm" |
20 | image = "core-image-minimal" | ||
21 | 22 | ||
22 | self.write_config(""" | 23 | self.write_config(""" |
23 | EFI_PROVIDER = "systemd-boot" | 24 | EFI_PROVIDER = "grub-efi" |
24 | IMAGE_FSTYPES:pn-%s:append = " wic" | 25 | IMAGE_FSTYPES:pn-%s:append = " wic" |
25 | MACHINE_FEATURES:append = " efi" | 26 | MACHINE_FEATURES:append = " efi" |
26 | WKS_FILE = "efi-bootdisk.wks.in" | 27 | WKS_FILE = "efi-bootdisk.wks.in" |
27 | IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage" | 28 | IMAGE_INSTALL:append = " grub-efi kernel-image-bzimage" |
28 | """ | 29 | """ |
29 | % (image)) | 30 | % (image)) |
30 | 31 | ||
diff --git a/meta/lib/oeqa/selftest/cases/esdk.py b/meta/lib/oeqa/selftest/cases/esdk.py index 9f5de2cde7..7a5fe00a08 100644 --- a/meta/lib/oeqa/selftest/cases/esdk.py +++ b/meta/lib/oeqa/selftest/cases/esdk.py | |||
@@ -27,11 +27,7 @@ class oeSDKExtSelfTest(OESelftestTestCase): | |||
27 | return glob.glob(pattern)[0] | 27 | return glob.glob(pattern)[0] |
28 | 28 | ||
29 | @staticmethod | 29 | @staticmethod |
30 | def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options): | 30 | def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, **options): |
31 | if postconfig: | ||
32 | esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf') | ||
33 | with open(esdk_conf_file, 'a+') as f: | ||
34 | f.write(postconfig) | ||
35 | if not options: | 31 | if not options: |
36 | options = {} | 32 | options = {} |
37 | if not 'shell' in options: | 33 | if not 'shell' in options: |
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py index 44099176fc..1beef5cfed 100644 --- a/meta/lib/oeqa/selftest/cases/fetch.py +++ b/meta/lib/oeqa/selftest/cases/fetch.py | |||
@@ -74,8 +74,8 @@ class Dependencies(OESelftestTestCase): | |||
74 | tinfoil.prepare(config_only=False, quiet=2) | 74 | tinfoil.prepare(config_only=False, quiet=2) |
75 | 75 | ||
76 | r = """ | 76 | r = """ |
77 | LICENSE="CLOSED" | 77 | LICENSE = "CLOSED" |
78 | SRC_URI="http://example.com/tarball.zip" | 78 | SRC_URI = "http://example.com/tarball.zip" |
79 | """ | 79 | """ |
80 | f = self.write_recipe(textwrap.dedent(r), tempdir) | 80 | f = self.write_recipe(textwrap.dedent(r), tempdir) |
81 | d = tinfoil.parse_recipe_file(f) | 81 | d = tinfoil.parse_recipe_file(f) |
@@ -84,8 +84,8 @@ class Dependencies(OESelftestTestCase): | |||
84 | 84 | ||
85 | # Verify that the downloadfilename overrides the URI | 85 | # Verify that the downloadfilename overrides the URI |
86 | r = """ | 86 | r = """ |
87 | LICENSE="CLOSED" | 87 | LICENSE = "CLOSED" |
88 | SRC_URI="https://example.com/tarball;downloadfilename=something.zip" | 88 | SRC_URI = "https://example.com/tarball;downloadfilename=something.zip" |
89 | """ | 89 | """ |
90 | f = self.write_recipe(textwrap.dedent(r), tempdir) | 90 | f = self.write_recipe(textwrap.dedent(r), tempdir) |
91 | d = tinfoil.parse_recipe_file(f) | 91 | d = tinfoil.parse_recipe_file(f) |
@@ -93,8 +93,8 @@ class Dependencies(OESelftestTestCase): | |||
93 | self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "") | 93 | self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "") |
94 | 94 | ||
95 | r = """ | 95 | r = """ |
96 | LICENSE="CLOSED" | 96 | LICENSE = "CLOSED" |
97 | SRC_URI="ftp://example.com/tarball.lz" | 97 | SRC_URI = "ftp://example.com/tarball.lz" |
98 | """ | 98 | """ |
99 | f = self.write_recipe(textwrap.dedent(r), tempdir) | 99 | f = self.write_recipe(textwrap.dedent(r), tempdir) |
100 | d = tinfoil.parse_recipe_file(f) | 100 | d = tinfoil.parse_recipe_file(f) |
@@ -102,8 +102,8 @@ class Dependencies(OESelftestTestCase): | |||
102 | self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends")) | 102 | self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends")) |
103 | 103 | ||
104 | r = """ | 104 | r = """ |
105 | LICENSE="CLOSED" | 105 | LICENSE = "CLOSED" |
106 | SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff" | 106 | SRC_URI = "git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff" |
107 | """ | 107 | """ |
108 | f = self.write_recipe(textwrap.dedent(r), tempdir) | 108 | f = self.write_recipe(textwrap.dedent(r), tempdir) |
109 | d = tinfoil.parse_recipe_file(f) | 109 | d = tinfoil.parse_recipe_file(f) |
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py index 347c065377..3c40857747 100644 --- a/meta/lib/oeqa/selftest/cases/fitimage.py +++ b/meta/lib/oeqa/selftest/cases/fitimage.py | |||
@@ -4,12 +4,740 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars | ||
9 | import os | 7 | import os |
10 | import re | 8 | import re |
9 | import shlex | ||
10 | import logging | ||
11 | import pprint | ||
12 | import tempfile | ||
13 | |||
14 | import oe.fitimage | ||
15 | |||
16 | from oeqa.selftest.case import OESelftestTestCase | ||
17 | from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_bb_var | ||
18 | |||
19 | |||
20 | class BbVarsMockGenKeys: | ||
21 | def __init__(self, keydir, gen_keys="0", sign_enabled="0", keyname="", sign_ind="0", img_keyname=""): | ||
22 | self.bb_vars = { | ||
23 | 'FIT_GENERATE_KEYS': gen_keys, | ||
24 | 'FIT_KEY_GENRSA_ARGS': "-F4", | ||
25 | 'FIT_KEY_REQ_ARGS': "-batch -new", | ||
26 | 'FIT_KEY_SIGN_PKCS': "-x509", | ||
27 | 'FIT_SIGN_INDIVIDUAL': sign_ind, | ||
28 | 'FIT_SIGN_NUMBITS': "2048", | ||
29 | 'UBOOT_SIGN_ENABLE': sign_enabled, | ||
30 | 'UBOOT_SIGN_IMG_KEYNAME': img_keyname, | ||
31 | 'UBOOT_SIGN_KEYDIR': keydir, | ||
32 | 'UBOOT_SIGN_KEYNAME': keyname, | ||
33 | } | ||
34 | |||
35 | def getVar(self, var): | ||
36 | return self.bb_vars[var] | ||
37 | |||
38 | class FitImageTestCase(OESelftestTestCase): | ||
39 | """Test functions usable for testing kernel-fitimage.bbclass and uboot-sign.bbclass | ||
40 | |||
41 | A brief summary showing the structure of a test case: | ||
42 | |||
43 | self._test_fitimage() | ||
44 | # Generate a local.conf file and bitbake the bootloader or the kernel | ||
45 | self._bitbake_fit_image() | ||
46 | |||
47 | # Check if the its file contains the expected paths and attributes. | ||
48 | # The _get_req_* functions are implemented by more specific child classes. | ||
49 | self._check_its_file() | ||
50 | req_its_paths = self._get_req_its_paths() | ||
51 | req_sigvalues_config = self._get_req_sigvalues_config() | ||
52 | req_sigvalues_image = self._get_req_sigvalues_image() | ||
53 | # Compare the its file against req_its_paths, req_sigvalues_config, req_sigvalues_image | ||
54 | |||
55 | # Call the dumpimage utility and check that it prints all the expected paths and attributes | ||
56 | # The _get_req_* functions are implemented by more specific child classes. | ||
57 | self._check_fitimage() | ||
58 | self._get_req_sections() | ||
59 | # Compare the output of the dumpimage utility against req_sections | ||
60 | """ | ||
61 | |||
62 | MKIMAGE_HASH_LENGTHS = { 'sha256': 64, 'sha384': 96, 'sha512': 128 } | ||
63 | MKIMAGE_SIGNATURE_LENGTHS = { 'rsa2048': 512 } | ||
64 | |||
65 | def _gen_signing_key(self, bb_vars): | ||
66 | """Generate a key pair and a singing certificate | ||
67 | |||
68 | Generate a UBOOT_SIGN_KEYNAME in the UBOOT_SIGN_KEYDIR similar to what | ||
69 | the FIT_GENERATE_KEYS feature does. However, having a static key is | ||
70 | probably a more realistic use case than generating a random key with | ||
71 | each clean build. So this needs to be tested as well. | ||
72 | The FIT_GENERATE_KEYS generates 2 keys: The UBOOT_SIGN_KEYNAME and the | ||
73 | UBOOT_SIGN_IMG_KEYNAME. The UBOOT_SIGN_IMG_KEYNAME is used by the | ||
74 | FIT_SIGN_INDIVIDUAL feature only. Testing that everything works when | ||
75 | only one key is available is important as well. Therefore this | ||
76 | function generates only the keys which are really needed, not just two. | ||
77 | """ | ||
78 | |||
79 | # Define some variables which are usually defined by the kernel-fitimage.bbclass. | ||
80 | # But for testing purpose check if the uboot-sign.bbclass is independent from | ||
81 | # the kernel-fitimage.bbclass | ||
82 | fit_sign_numbits = bb_vars.get('FIT_SIGN_NUMBITS', "2048") | ||
83 | fit_key_genrsa_args = bb_vars.get('FIT_KEY_GENRSA_ARGS', "-F4") | ||
84 | fit_key_req_args = bb_vars.get('FIT_KEY_REQ_ARGS', "-batch -new") | ||
85 | fit_key_sign_pkcs = bb_vars.get('FIT_KEY_SIGN_PKCS', "-x509") | ||
86 | |||
87 | uboot_sign_keydir = bb_vars['UBOOT_SIGN_KEYDIR'] | ||
88 | sign_keys = [bb_vars['UBOOT_SIGN_KEYNAME']] | ||
89 | if bb_vars['FIT_SIGN_INDIVIDUAL'] == "1": | ||
90 | sign_keys.append(bb_vars['UBOOT_SIGN_IMG_KEYNAME']) | ||
91 | for sign_key in sign_keys: | ||
92 | sign_key_path = os.path.join(uboot_sign_keydir, sign_key) | ||
93 | if not os.path.isdir(uboot_sign_keydir): | ||
94 | os.makedirs(uboot_sign_keydir) | ||
95 | openssl_bindir = FitImageTestCase._setup_native('openssl-native') | ||
96 | openssl_path = os.path.join(openssl_bindir, 'openssl') | ||
97 | runCmd("%s genrsa %s -out %s.key %s" % ( | ||
98 | openssl_path, | ||
99 | fit_key_genrsa_args, | ||
100 | sign_key_path, | ||
101 | fit_sign_numbits | ||
102 | )) | ||
103 | runCmd("%s req %s %s -key %s.key -out %s.crt" % ( | ||
104 | openssl_path, | ||
105 | fit_key_req_args, | ||
106 | fit_key_sign_pkcs, | ||
107 | sign_key_path, | ||
108 | sign_key_path | ||
109 | )) | ||
110 | |||
111 | @staticmethod | ||
112 | def _gen_random_file(file_path, num_bytes=65536): | ||
113 | with open(file_path, 'wb') as file_out: | ||
114 | file_out.write(os.urandom(num_bytes)) | ||
115 | |||
116 | @staticmethod | ||
117 | def _setup_native(native_recipe): | ||
118 | """Build a native recipe and return the path to its bindir in RECIPE_SYSROOT_NATIVE""" | ||
119 | bitbake(native_recipe + " -c addto_recipe_sysroot") | ||
120 | vars = get_bb_vars(['RECIPE_SYSROOT_NATIVE', 'bindir'], native_recipe) | ||
121 | return os.path.join(vars['RECIPE_SYSROOT_NATIVE'], vars['bindir']) | ||
122 | |||
123 | def _verify_fit_image_signature(self, uboot_tools_bindir, fitimage_path, dtb_path, conf_name=None): | ||
124 | """Verify the signature of a fit configuration | ||
125 | |||
126 | The fit_check_sign utility from u-boot-tools-native is called. | ||
127 | uboot-fit_check_sign -f fitImage -k $dtb_path -c conf-$dtb_name | ||
128 | dtb_path refers to a binary device tree containing the public key. | ||
129 | """ | ||
130 | fit_check_sign_path = os.path.join(uboot_tools_bindir, 'uboot-fit_check_sign') | ||
131 | cmd = '%s -f %s -k %s' % (fit_check_sign_path, fitimage_path, dtb_path) | ||
132 | if conf_name: | ||
133 | cmd += ' -c %s' % conf_name | ||
134 | result = runCmd(cmd) | ||
135 | self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output) | ||
136 | self.assertIn("Signature check OK", result.output) | ||
137 | |||
138 | def _verify_dtb_property(self, dtc_bindir, dtb_path, node_path, property_name, req_property, absent=False): | ||
139 | """Verify device tree properties | ||
140 | |||
141 | The fdtget utility from dtc-native is called and the property is compared. | ||
142 | """ | ||
143 | fdtget_path = os.path.join(dtc_bindir, 'fdtget') | ||
144 | cmd = '%s %s %s %s' % (fdtget_path, dtb_path, node_path, property_name) | ||
145 | if absent: | ||
146 | result = runCmd(cmd, ignore_status=True) | ||
147 | self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output) | ||
148 | self.assertIn("FDT_ERR_NOTFOUND", result.output) | ||
149 | else: | ||
150 | result = runCmd(cmd) | ||
151 | self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output) | ||
152 | self.assertEqual(req_property, result.output.strip()) | ||
153 | |||
154 | @staticmethod | ||
155 | def _find_string_in_bin_file(file_path, search_string): | ||
156 | """find strings in a binary file | ||
157 | |||
158 | Shell equivalent: strings "$1" | grep "$2" | wc -l | ||
159 | return number of matches | ||
160 | """ | ||
161 | found_positions = 0 | ||
162 | with open(file_path, 'rb') as file: | ||
163 | content = file.read().decode('ascii', errors='ignore') | ||
164 | found_positions = content.count(search_string) | ||
165 | return found_positions | ||
166 | |||
167 | @staticmethod | ||
168 | def _get_uboot_mkimage_sign_args(uboot_mkimage_sign_args): | ||
169 | """Retrive the string passed via -c to the mkimage command | ||
170 | |||
171 | Example: If a build configuration defines | ||
172 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | ||
173 | this function returns "a smart comment" | ||
174 | """ | ||
175 | a_comment = None | ||
176 | if uboot_mkimage_sign_args: | ||
177 | mkimage_args = shlex.split(uboot_mkimage_sign_args) | ||
178 | try: | ||
179 | c_index = mkimage_args.index('-c') | ||
180 | a_comment = mkimage_args[c_index+1] | ||
181 | except ValueError: | ||
182 | pass | ||
183 | return a_comment | ||
184 | |||
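For the docstring's example input, the shlex-based extraction reduces to the following standalone sketch:

    import shlex

    mkimage_args = shlex.split("-c 'a smart comment'")  # ['-c', 'a smart comment']
    a_comment = mkimage_args[mkimage_args.index('-c') + 1]
    assert a_comment == 'a smart comment'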
185 | @staticmethod | ||
186 | def _get_dtb_files(bb_vars): | ||
187 | """Return a list of devicetree names | ||
188 | |||
189 | The list should be used to check the dtb and conf nodes in the FIT image or its file. | ||
190 | In addition to the entries from KERNEL_DEVICETREE, the external devicetree and the | ||
191 | external devicetree overlay added by the test recipe bbb-dtbs-as-ext are handled as well. | ||
192 | """ | ||
193 | kernel_devicetree = bb_vars.get('KERNEL_DEVICETREE') | ||
194 | all_dtbs = [] | ||
195 | dtb_symlinks = [] | ||
196 | if kernel_devicetree: | ||
197 | all_dtbs += [os.path.basename(dtb) for dtb in kernel_devicetree.split()] | ||
198 | # Support only the test recipe which provides 1 devicetree and 1 devicetree overlay | ||
199 | pref_prov_dtb = bb_vars.get('PREFERRED_PROVIDER_virtual/dtb') | ||
200 | if pref_prov_dtb == "bbb-dtbs-as-ext": | ||
201 | all_dtbs += ["am335x-bonegreen-ext.dtb", "BBORG_RELAY-00A2.dtbo"] | ||
202 | dtb_symlinks.append("am335x-bonegreen-ext-alias.dtb") | ||
203 | return (all_dtbs, dtb_symlinks) | ||
204 | |||
205 | def _is_req_dict_in_dict(self, found_dict, req_dict): | ||
206 | """ | ||
207 | Check if all key-value pairs in the required dictionary are present in the found dictionary. | ||
208 | |||
209 | This function recursively checks if the required dictionary (`req_dict`) is a subset of the found dictionary (`found_dict`). | ||
210 | It supports nested dictionaries, strings, lists, and sets as values. | ||
211 | |||
212 | Args: | ||
213 | found_dict (dict): The dictionary to search within. | ||
214 | req_dict (dict): The dictionary containing the required key-value pairs. | ||
215 | """ | ||
216 | for key, value in req_dict.items(): | ||
217 | self.assertIn(key, found_dict) | ||
218 | if isinstance(value, dict): | ||
219 | self._is_req_dict_in_dict(found_dict[key], value) | ||
220 | elif isinstance(value, str): | ||
221 | self.assertIn(value, found_dict[key]) | ||
222 | elif isinstance(value, list): | ||
223 | self.assertLessEqual(set(value), set(found_dict[key])) | ||
224 | elif isinstance(value, set): | ||
225 | self.assertLessEqual(value, found_dict[key]) | ||
226 | else: | ||
227 | self.assertEqual(value, found_dict[key]) | ||
228 | |||
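A standalone restatement of the subset semantics with invented data: string values match by substring, lists and sets by subset, and nested dictionaries recurse.

    def is_subset(found, req):
        # mirrors _is_req_dict_in_dict, with plain asserts instead of unittest methods
        for key, value in req.items():
            assert key in found
            if isinstance(value, dict):
                is_subset(found[key], value)
            elif isinstance(value, str):
                assert value in found[key]            # substring match
            elif isinstance(value, (list, set)):
                assert set(value) <= set(found[key])  # subset match
            else:
                assert value == found[key]

    found = {'Image 0 (kernel-1)': {'Type': 'Kernel Image', 'Loadables': ['fdt-1', 'fdt-2']}}
    is_subset(found, {'Image 0 (kernel-1)': {'Type': 'Kernel', 'Loadables': ['fdt-1']}})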
229 | def _check_its_file(self, bb_vars, its_file_path): | ||
230 | """Check if the its file contains the expected sections and fields""" | ||
231 | # print the its file for debugging | ||
232 | if logging.DEBUG >= self.logger.level: | ||
233 | with open(its_file_path) as its_file: | ||
234 | self.logger.debug("its file: %s" % its_file.read()) | ||
235 | |||
236 | # Generate a list of expected paths in the its file | ||
237 | req_its_paths = self._get_req_its_paths(bb_vars) | ||
238 | self.logger.debug("req_its_paths:\n%s\n" % pprint.pformat(req_its_paths, indent=4)) | ||
239 | |||
240 | # Generate a dict of expected configuration signature nodes | ||
241 | req_sigvalues_config = self._get_req_sigvalues_config(bb_vars) | ||
242 | self.logger.debug("req_sigvalues_config:\n%s\n" % pprint.pformat(req_sigvalues_config, indent=4)) | ||
243 | |||
244 | # Generate a dict of expected image signature nodes | ||
245 | req_sigvalues_image = self._get_req_sigvalues_image(bb_vars) | ||
246 | self.logger.debug("req_sigvalues_image:\n%s\n" % pprint.pformat(req_sigvalues_image, indent=4)) | ||
247 | |||
248 | # Parse the its file for paths and signatures | ||
249 | its_path = [] | ||
250 | its_paths = [] | ||
251 | linect = 0 | ||
252 | sigs = {} | ||
253 | with open(its_file_path) as its_file: | ||
254 | for line in its_file: | ||
255 | linect += 1 | ||
256 | line = line.strip() | ||
257 | if line.endswith('};'): | ||
258 | its_path.pop() | ||
259 | elif line.endswith('{'): | ||
260 | its_path.append(line[:-1].strip()) | ||
261 | its_paths.append(its_path[:]) | ||
262 | # kernel-fitimage uses signature-1, uboot-sign uses signature | ||
263 | elif its_path and (its_path[-1] == 'signature-1' or its_path[-1] == 'signature'): | ||
264 | itsdotpath = '.'.join(its_path) | ||
265 | if not itsdotpath in sigs: | ||
266 | sigs[itsdotpath] = {} | ||
267 | if not '=' in line or not line.endswith(';'): | ||
268 | self.fail('Unexpected formatting in %s sigs section line %d:%s' % (its_file_path, linect, line)) | ||
269 | key, value = line.split('=', 1) | ||
270 | sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';') | ||
271 | |||
272 | # Check if all expected paths are found in the its file | ||
273 | self.logger.debug("itspaths:\n%s\n" % pprint.pformat(its_paths, indent=4)) | ||
274 | for req_path in req_its_paths: | ||
275 | if not req_path in its_paths: | ||
276 | self.fail('Missing path in its file: %s (%s)' % (req_path, its_file_path)) | ||
277 | |||
278 | # Check if all the expected signature nodes (images and configurations) are found | ||
279 | self.logger.debug("sigs:\n%s\n" % pprint.pformat(sigs, indent=4)) | ||
280 | if req_sigvalues_config or req_sigvalues_image: | ||
281 | for its_path, values in sigs.items(): | ||
282 | if bb_vars.get('FIT_CONF_PREFIX', "conf-") in its_path: | ||
283 | reqsigvalues = req_sigvalues_config | ||
284 | else: | ||
285 | reqsigvalues = req_sigvalues_image | ||
286 | for reqkey, reqvalue in reqsigvalues.items(): | ||
287 | value = values.get(reqkey, None) | ||
288 | if value is None: | ||
289 | self.fail('Missing key "%s" in its file signature section %s (%s)' % (reqkey, its_path, its_file_path)) | ||
290 | self.assertEqual(value, reqvalue) | ||
291 | |||
292 | # Generate a list of expected fields in the its file | ||
293 | req_its_fields = self._get_req_its_fields(bb_vars) | ||
294 | self.logger.debug("req_its_fields:\n%s\n" % pprint.pformat(req_its_fields, indent=4)) | ||
295 | |||
296 | # Check if all expected fields are in the its file | ||
297 | if req_its_fields: | ||
298 | field_index = 0 | ||
299 | field_index_last = len(req_its_fields) - 1 | ||
300 | with open(its_file_path) as its_file: | ||
301 | for line in its_file: | ||
302 | if req_its_fields[field_index] in line: | ||
303 | if field_index < field_index_last: | ||
304 | field_index +=1 | ||
305 | else: | ||
306 | break | ||
307 | self.assertEqual(field_index, field_index_last, | ||
308 | "Fields in Image Tree Source File %s did not match, error in finding %s" | ||
309 | % (its_file_path, req_its_fields[field_index])) | ||
310 | |||
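The path-stack parsing above can be pictured on a fabricated its fragment (the its syntax shown is assumed for illustration):

    its = """/ {
        images {
            kernel-1 {
                hash-1 {
                };
            };
        };
    };
    """
    path, paths = [], []
    for line in its.splitlines():
        line = line.strip()
        if line.endswith('};'):
            path.pop()
        elif line.endswith('{'):
            path.append(line[:-1].strip())
            paths.append(path[:])
    # paths == [['/'], ['/', 'images'], ['/', 'images', 'kernel-1'],
    #           ['/', 'images', 'kernel-1', 'hash-1']]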
311 | def _check_fitimage(self, bb_vars, fitimage_path, uboot_tools_bindir): | ||
312 | """Run dumpimage on the final FIT image and parse the output into a dict""" | ||
313 | dumpimage_path = os.path.join(uboot_tools_bindir, 'dumpimage') | ||
314 | cmd = '%s -l %s' % (dumpimage_path, fitimage_path) | ||
315 | self.logger.debug("Analyzing output from dumpimage: %s" % cmd) | ||
316 | dumpimage_result = runCmd(cmd) | ||
317 | in_section = None | ||
318 | sections = {} | ||
319 | self.logger.debug("dumpimage output: %s" % dumpimage_result.output) | ||
320 | for line in dumpimage_result.output.splitlines(): | ||
321 | # Find potentially hashed and signed sections | ||
322 | if line.startswith((' Configuration', ' Image')): | ||
323 | in_section = re.search(r'\((.*)\)', line).groups()[0] | ||
324 | # Key value lines start with two spaces otherwise the section ended | ||
325 | elif not line.startswith(" "): | ||
326 | in_section = None | ||
327 | # Handle key value lines of this section | ||
328 | elif in_section: | ||
329 | if not in_section in sections: | ||
330 | sections[in_section] = {} | ||
331 | try: | ||
332 | key, value = line.split(':', 1) | ||
333 | key = key.strip() | ||
334 | value = value.strip() | ||
335 | except ValueError as val_err: | ||
336 | # Handle multi-line entries, e.g. Loadables, by joining them into a comma-separated list | ||
337 | if key and line.startswith(" "): | ||
338 | value = sections[in_section][key] + "," + line.strip() | ||
339 | else: | ||
340 | raise ValueError(f"Error processing line: '{line}'. Original error: {val_err}") | ||
341 | sections[in_section][key] = value | ||
342 | |||
343 | # Check if the requested dictionary is a subset of the parsed dictionary | ||
344 | req_sections, num_signatures = self._get_req_sections(bb_vars) | ||
345 | self.logger.debug("req_sections: \n%s\n" % pprint.pformat(req_sections, indent=4)) | ||
346 | self.logger.debug("dumpimage sections: \n%s\n" % pprint.pformat(sections, indent=4)) | ||
347 | self._is_req_dict_in_dict(sections, req_sections) | ||
348 | |||
349 | # Call the signing-related checks if the function is provided by an inheriting class | ||
350 | self._check_signing(bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path) | ||
351 | |||
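The section parser can be illustrated on a fabricated dumpimage excerpt (the exact dumpimage output format is an assumption here, not a quotation):

    import re

    sample = (
        " Image 0 (kernel-1)\n"
        "  Description:  Linux kernel\n"
        "  Type:         Kernel Image\n"
        " Configuration 0 (conf-1)\n"
        "  Kernel:       kernel-1\n"
    )
    sections, in_section = {}, None
    for line in sample.splitlines():
        if line.startswith((' Configuration', ' Image')):
            in_section = re.search(r'\((.*)\)', line).groups()[0]
        elif not line.startswith(' '):
            in_section = None
        elif in_section:
            key, value = line.split(':', 1)
            sections.setdefault(in_section, {})[key.strip()] = value.strip()
    # sections == {'kernel-1': {'Description': 'Linux kernel', 'Type': 'Kernel Image'},
    #              'conf-1': {'Kernel': 'kernel-1'}}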
352 | def _get_req_its_paths(self, bb_vars): | ||
353 | self.logger.error("This function needs to be implemented") | ||
354 | return [] | ||
355 | |||
356 | def _get_req_its_fields(self, bb_vars): | ||
357 | self.logger.error("This function needs to be implemented") | ||
358 | return [] | ||
359 | |||
360 | def _get_req_sigvalues_config(self, bb_vars): | ||
361 | self.logger.error("This function needs to be implemented") | ||
362 | return {} | ||
363 | |||
364 | def _get_req_sigvalues_image(self, bb_vars): | ||
365 | self.logger.error("This function needs to be implemented") | ||
366 | return {} | ||
367 | |||
368 | def _get_req_sections(self, bb_vars): | ||
369 | self.logger.error("This function needs to be implemented") | ||
370 | return ({}, 0) | ||
371 | |||
372 | def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path): | ||
373 | """Verify the signatures in the FIT image.""" | ||
374 | self.fail("Function needs to be implemented by inheriting classes") | ||
375 | |||
376 | def _bitbake_fit_image(self, bb_vars): | ||
377 | """Bitbake the FIT image and return the paths to the its file and the FIT image""" | ||
378 | self.fail("Function needs to be implemented by inheriting classes") | ||
379 | |||
380 | def _test_fitimage(self, bb_vars): | ||
381 | """Check if the its file and the FIT image are created and signed correctly""" | ||
382 | fitimage_its_path, fitimage_path = self._bitbake_fit_image(bb_vars) | ||
383 | self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
384 | self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path)) | ||
385 | |||
386 | self.logger.debug("Checking its: %s" % fitimage_its_path) | ||
387 | self._check_its_file(bb_vars, fitimage_its_path) | ||
388 | |||
389 | # Setup u-boot-tools-native | ||
390 | uboot_tools_bindir = FitImageTestCase._setup_native('u-boot-tools-native') | ||
391 | |||
392 | # Verify the FIT image | ||
393 | self._check_fitimage(bb_vars, fitimage_path, uboot_tools_bindir) | ||
394 | |||
395 | class KernelFitImageBase(FitImageTestCase): | ||
396 | """Test cases for the linux-yocto-fitimage recipe""" | ||
397 | |||
398 | def _fit_get_bb_vars(self, additional_vars=[]): | ||
399 | """Retrieve BitBake variables specific to the test case. | ||
400 | |||
401 | Call the get_bb_vars function once and get all variables needed by the test case. | ||
402 | """ | ||
403 | internal_used = { | ||
404 | 'DEPLOY_DIR_IMAGE', | ||
405 | 'FIT_CONF_DEFAULT_DTB', | ||
406 | 'FIT_CONF_PREFIX', | ||
407 | 'FIT_DESC', | ||
408 | 'FIT_HASH_ALG', | ||
409 | 'FIT_KERNEL_COMP_ALG', | ||
410 | 'FIT_SIGN_ALG', | ||
411 | 'FIT_SIGN_INDIVIDUAL', | ||
412 | 'FIT_UBOOT_ENV', | ||
413 | 'INITRAMFS_IMAGE_BUNDLE', | ||
414 | 'INITRAMFS_IMAGE_NAME', | ||
415 | 'INITRAMFS_IMAGE', | ||
416 | 'KERNEL_DEPLOYSUBDIR', | ||
417 | 'KERNEL_DEVICETREE', | ||
418 | 'KERNEL_FIT_LINK_NAME', | ||
419 | 'MACHINE', | ||
420 | 'PREFERRED_PROVIDER_virtual/dtb', | ||
421 | 'UBOOT_ARCH', | ||
422 | 'UBOOT_ENTRYPOINT', | ||
423 | 'UBOOT_LOADADDRESS', | ||
424 | 'UBOOT_MKIMAGE_KERNEL_TYPE', | ||
425 | 'UBOOT_MKIMAGE_SIGN_ARGS', | ||
426 | 'UBOOT_RD_ENTRYPOINT', | ||
427 | 'UBOOT_RD_LOADADDRESS', | ||
428 | 'UBOOT_SIGN_ENABLE', | ||
429 | 'UBOOT_SIGN_IMG_KEYNAME', | ||
430 | 'UBOOT_SIGN_KEYDIR', | ||
431 | 'UBOOT_SIGN_KEYNAME', | ||
432 | } | ||
433 | bb_vars = get_bb_vars(list(internal_used | set(additional_vars)), self.kernel_recipe) | ||
434 | self.logger.debug("bb_vars: %s" % pprint.pformat(bb_vars, indent=4)) | ||
435 | return bb_vars | ||
436 | |||
437 | def _config_add_kernel_classes(self, config): | ||
438 | config += '# Use kernel-fit-extra-artifacts.bbclass for the creation of the vmlinux artifact' + os.linesep | ||
439 | config += 'KERNEL_CLASSES = "kernel-fit-extra-artifacts"' + os.linesep | ||
440 | return config | ||
441 | |||
442 | @property | ||
443 | def kernel_recipe(self): | ||
444 | return "linux-yocto-fitimage" | ||
445 | |||
446 | def _config_add_uboot_env(self, config): | ||
447 | """Generate an u-boot environment | ||
11 | 448 | ||
12 | class FitImageTests(OESelftestTestCase): | 449 | Create a boot.cmd file that is packed into the FIT image as a source-able text file. |
450 | Updates the configuration to include the boot.cmd file. | ||
451 | """ | ||
452 | fit_uenv_file = "boot.cmd" | ||
453 | test_files_dir = "test-files" | ||
454 | fit_uenv_path = os.path.join(self.builddir, test_files_dir, fit_uenv_file) | ||
455 | |||
456 | config += '# Add a u-boot script to the fitImage' + os.linesep | ||
457 | config += 'FIT_UBOOT_ENV = "%s"' % fit_uenv_file + os.linesep | ||
458 | config += 'FILESEXTRAPATHS:prepend := "${TOPDIR}/%s:"' % test_files_dir + os.linesep | ||
459 | config += 'SRC_URI:append:pn-%s = " file://${FIT_UBOOT_ENV}"' % self.kernel_recipe + os.linesep | ||
460 | |||
461 | if not os.path.isdir(test_files_dir): | ||
462 | os.makedirs(test_files_dir) | ||
463 | self.logger.debug("Writing to: %s" % fit_uenv_path) | ||
464 | with open(fit_uenv_path, "w") as f: | ||
465 | f.write('echo "hello world"') | ||
466 | |||
467 | return config | ||
468 | |||
469 | def _bitbake_fit_image(self, bb_vars): | ||
470 | """Bitbake the kernel and return the paths to the its file and the FIT image""" | ||
471 | bitbake(self.kernel_recipe) | ||
472 | |||
473 | # Find the right its file and the final fitImage and check if both files are available | ||
474 | deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE'] | ||
475 | initramfs_image = bb_vars['INITRAMFS_IMAGE'] | ||
476 | initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE'] | ||
477 | initramfs_image_name = bb_vars['INITRAMFS_IMAGE_NAME'] | ||
478 | kernel_fit_link_name = bb_vars['KERNEL_FIT_LINK_NAME'] | ||
479 | if not initramfs_image and initramfs_image_bundle != "1": | ||
480 | fitimage_its_name = "fitImage-its-%s" % kernel_fit_link_name | ||
481 | fitimage_name = "fitImage" | ||
482 | elif initramfs_image and initramfs_image_bundle != "1": | ||
483 | fitimage_its_name = "fitImage-its-%s-%s" % (initramfs_image_name, kernel_fit_link_name) | ||
484 | fitimage_name = "fitImage-%s-%s" % (initramfs_image_name, kernel_fit_link_name) | ||
485 | elif initramfs_image and initramfs_image_bundle == "1": | ||
486 | fitimage_its_name = "fitImage-its-%s-%s" % (initramfs_image_name, kernel_fit_link_name) | ||
487 | fitimage_name = "fitImage" # or fitImage-${KERNEL_IMAGE_LINK_NAME}${KERNEL_IMAGE_BIN_EXT} | ||
488 | else: | ||
489 | self.fail('Invalid configuration: INITRAMFS_IMAGE_BUNDLE = "1" and not INITRAMFS_IMAGE') | ||
490 | kernel_deploysubdir = bb_vars['KERNEL_DEPLOYSUBDIR'] | ||
491 | if kernel_deploysubdir: | ||
492 | fitimage_its_path = os.path.realpath(os.path.join(deploy_dir_image, kernel_deploysubdir, fitimage_its_name)) | ||
493 | fitimage_path = os.path.realpath(os.path.join(deploy_dir_image, kernel_deploysubdir, fitimage_name)) | ||
494 | else: | ||
495 | fitimage_its_path = os.path.realpath(os.path.join(deploy_dir_image, fitimage_its_name)) | ||
496 | fitimage_path = os.path.realpath(os.path.join(deploy_dir_image, fitimage_name)) | ||
497 | return (fitimage_its_path, fitimage_path) | ||
498 | |||
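The three naming branches can be condensed into a small helper for illustration (the example values are invented, not taken from a real build):

    def fit_artifact_names(initramfs_image, bundle, rd_name, link_name):
        if not initramfs_image and bundle != "1":
            return ("fitImage-its-%s" % link_name, "fitImage")
        if initramfs_image and bundle != "1":
            return ("fitImage-its-%s-%s" % (rd_name, link_name),
                    "fitImage-%s-%s" % (rd_name, link_name))
        return ("fitImage-its-%s-%s" % (rd_name, link_name), "fitImage")

    print(fit_artifact_names("", "0", "", "qemux86-64"))
    # -> ('fitImage-its-qemux86-64', 'fitImage')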
499 | def _get_req_its_paths(self, bb_vars): | ||
500 | """Generate a list of expected paths in the its file | ||
501 | |||
502 | Example: | ||
503 | [ | ||
504 | ['/', 'images', 'kernel-1', 'hash-1'], | ||
505 | ['/', 'images', 'kernel-1', 'signature-1'], | ||
506 | ] | ||
507 | """ | ||
508 | dtb_files, dtb_symlinks = FitImageTestCase._get_dtb_files(bb_vars) | ||
509 | fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL'] | ||
510 | fit_uboot_env = bb_vars['FIT_UBOOT_ENV'] | ||
511 | initramfs_image = bb_vars['INITRAMFS_IMAGE'] | ||
512 | initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE'] | ||
513 | uboot_sign_enable = bb_vars.get('UBOOT_SIGN_ENABLE') | ||
514 | |||
515 | # image nodes | ||
516 | images = [ 'kernel-1' ] | ||
517 | if dtb_files: | ||
518 | images += [ 'fdt-' + dtb for dtb in dtb_files ] | ||
519 | if fit_uboot_env: | ||
520 | images.append('bootscr-' + fit_uboot_env) | ||
521 | if bb_vars['MACHINE'] == "qemux86-64": # Not really the right if | ||
522 | images.append('setup-1') | ||
523 | if initramfs_image and initramfs_image_bundle != "1": | ||
524 | images.append('ramdisk-1') | ||
525 | |||
526 | # configuration nodes (one per DTB and also one per symlink) | ||
527 | if dtb_files: | ||
528 | configurations = [bb_vars['FIT_CONF_PREFIX'] + conf for conf in dtb_files + dtb_symlinks] | ||
529 | else: | ||
530 | configurations = [bb_vars['FIT_CONF_PREFIX'] + '1'] | ||
531 | |||
532 | # Create a list of paths for all image and configuration nodes | ||
533 | req_its_paths = [] | ||
534 | for image in images: | ||
535 | req_its_paths.append(['/', 'images', image, 'hash-1']) | ||
536 | if uboot_sign_enable == "1" and fit_sign_individual == "1": | ||
537 | req_its_paths.append(['/', 'images', image, 'signature-1']) | ||
538 | for configuration in configurations: | ||
539 | req_its_paths.append(['/', 'configurations', configuration, 'hash-1']) | ||
540 | if uboot_sign_enable == "1": | ||
541 | req_its_paths.append(['/', 'configurations', configuration, 'signature-1']) | ||
542 | return req_its_paths | ||
543 | |||
544 | def _get_req_its_fields(self, bb_vars): | ||
545 | initramfs_image = bb_vars['INITRAMFS_IMAGE'] | ||
546 | initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE'] | ||
547 | uboot_rd_loadaddress = bb_vars.get('UBOOT_RD_LOADADDRESS') | ||
548 | uboot_rd_entrypoint = bb_vars.get('UBOOT_RD_ENTRYPOINT') | ||
549 | |||
550 | its_field_check = [ | ||
551 | 'description = "%s";' % bb_vars['FIT_DESC'], | ||
552 | 'description = "Linux kernel";', | ||
553 | 'type = "' + str(bb_vars['UBOOT_MKIMAGE_KERNEL_TYPE']) + '";', | ||
554 | # 'compression = "' + str(bb_vars['FIT_KERNEL_COMP_ALG']) + '";', defined based on files in TMPDIR, not ideal... | ||
555 | 'data = /incbin/("linux.bin");', | ||
556 | 'arch = "' + str(bb_vars['UBOOT_ARCH']) + '";', | ||
557 | 'os = "linux";', | ||
558 | 'load = <' + str(bb_vars['UBOOT_LOADADDRESS']) + '>;', | ||
559 | 'entry = <' + str(bb_vars['UBOOT_ENTRYPOINT']) + '>;', | ||
560 | ] | ||
561 | if initramfs_image and initramfs_image_bundle != "1": | ||
562 | its_field_check.append('type = "ramdisk";') | ||
563 | if uboot_rd_loadaddress: | ||
564 | its_field_check.append("load = <%s>;" % uboot_rd_loadaddress) | ||
565 | if uboot_rd_entrypoint: | ||
566 | its_field_check.append("entry = <%s>;" % uboot_rd_entrypoint) | ||
567 | |||
568 | fit_conf_default_dtb = bb_vars.get('FIT_CONF_DEFAULT_DTB') | ||
569 | if fit_conf_default_dtb: | ||
570 | fit_conf_prefix = bb_vars.get('FIT_CONF_PREFIX', "conf-") | ||
571 | its_field_check.append('default = "' + fit_conf_prefix + fit_conf_default_dtb + '";') | ||
572 | |||
573 | its_field_check.append('kernel = "kernel-1";') | ||
574 | |||
575 | if initramfs_image and initramfs_image_bundle != "1": | ||
576 | its_field_check.append('ramdisk = "ramdisk-1";') | ||
577 | |||
578 | return its_field_check | ||
579 | |||
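The returned list is order-sensitive: the checker scans the .its file once and advances through the list as each expected string is found, exactly as the replaced tests did. A sketch of that scan (the suite's actual helper lives outside this hunk):

    def its_fields_found_in_order(fitimage_its_path, its_field_check):
        # Advance through the expected fields while reading the file top to
        # bottom; True only if every field appears, and in the given order.
        field_index = 0
        with open(fitimage_its_path) as its_file:
            for line in its_file:
                if field_index == len(its_field_check):
                    break
                if its_field_check[field_index] in line:
                    field_index += 1
        return field_index == len(its_field_check)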
580 | def _get_req_sigvalues_config(self, bb_vars): | ||
581 | """Generate a dictionary of expected configuration signature nodes""" | ||
582 | if bb_vars.get('UBOOT_SIGN_ENABLE') != "1": | ||
583 | return {} | ||
584 | sign_images = '"kernel", "fdt"' | ||
585 | if bb_vars['INITRAMFS_IMAGE'] and bb_vars['INITRAMFS_IMAGE_BUNDLE'] != "1": | ||
586 | sign_images += ', "ramdisk"' | ||
587 | if bb_vars['FIT_UBOOT_ENV']: | ||
588 | sign_images += ', "bootscr"' | ||
589 | req_sigvalues_config = { | ||
590 | 'algo': '"%s,%s"' % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']), | ||
591 | 'key-name-hint': '"%s"' % bb_vars['UBOOT_SIGN_KEYNAME'], | ||
592 | 'sign-images': sign_images, | ||
593 | } | ||
594 | return req_sigvalues_config | ||
595 | |||
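Illustratively, for a signed build with a separate (non-bundled) initramfs and a boot script, the returned dictionary looks like the following; the key name and algorithms are assumptions matching the test configurations further down, and the inner quoting mirrors how the values appear in the .its source:

    req_sigvalues_config = {
        'algo': '"sha256,rsa2048"',            # FIT_HASH_ALG,FIT_SIGN_ALG
        'key-name-hint': '"dev"',              # UBOOT_SIGN_KEYNAME
        'sign-images': '"kernel", "fdt", "ramdisk", "bootscr"',
    }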
596 | def _get_req_sigvalues_image(self, bb_vars): | ||
597 | """Generate a dictionary of expected image signature nodes""" | ||
598 | if bb_vars['FIT_SIGN_INDIVIDUAL'] != "1": | ||
599 | return {} | ||
600 | req_sigvalues_image = { | ||
601 | 'algo': '"%s,%s"' % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']), | ||
602 | 'key-name-hint': '"%s"' % bb_vars['UBOOT_SIGN_IMG_KEYNAME'], | ||
603 | } | ||
604 | return req_sigvalues_image | ||
605 | |||
606 | def _get_req_sections(self, bb_vars): | ||
607 | """Generate a dictionary of expected sections in the output of dumpimage""" | ||
608 | dtb_files, dtb_symlinks = FitImageTestCase._get_dtb_files(bb_vars) | ||
609 | fit_hash_alg = bb_vars['FIT_HASH_ALG'] | ||
610 | fit_sign_alg = bb_vars['FIT_SIGN_ALG'] | ||
611 | fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL'] | ||
612 | fit_uboot_env = bb_vars['FIT_UBOOT_ENV'] | ||
613 | initramfs_image = bb_vars['INITRAMFS_IMAGE'] | ||
614 | initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE'] | ||
615 | uboot_sign_enable = bb_vars['UBOOT_SIGN_ENABLE'] | ||
616 | uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME'] | ||
617 | uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME'] | ||
618 | num_signatures = 0 | ||
619 | req_sections = { | ||
620 | "kernel-1": { | ||
621 | "Type": "Kernel Image", | ||
622 | "OS": "Linux", | ||
623 | "Load Address": bb_vars['UBOOT_LOADADDRESS'], | ||
624 | "Entry Point": bb_vars['UBOOT_ENTRYPOINT'], | ||
625 | } | ||
626 | } | ||
627 | # Create one section per DTB | ||
628 | for dtb in dtb_files: | ||
629 | req_sections['fdt-' + dtb] = { | ||
630 | "Type": "Flat Device Tree", | ||
631 | } | ||
632 | # Add a script section if there is a script | ||
633 | if fit_uboot_env: | ||
634 | req_sections['bootscr-' + fit_uboot_env] = { "Type": "Script" } | ||
635 | # Add the initramfs | ||
636 | if initramfs_image and initramfs_image_bundle != "1": | ||
637 | req_sections['ramdisk-1'] = { | ||
638 | "Type": "RAMDisk Image", | ||
639 | "Load Address": bb_vars['UBOOT_RD_LOADADDRESS'], | ||
640 | "Entry Point": bb_vars['UBOOT_RD_ENTRYPOINT'] | ||
641 | } | ||
642 | # Create a configuration section for each DTB | ||
643 | if dtb_files: | ||
644 | for dtb in dtb_files + dtb_symlinks: | ||
645 | conf_name = bb_vars['FIT_CONF_PREFIX'] + dtb | ||
646 | # Assume that DTBs with "-alias" in their name are symlink DTBs created e.g. by the | ||
647 | # bbb-dtbs-as-ext test recipe. Make the configuration node point to the real DTB. | ||
648 | real_dtb = dtb.replace("-alias", "") | ||
649 | # dtb overlays do not refer to a kernel (yet?) | ||
650 | if dtb.endswith('.dtbo'): | ||
651 | req_sections[conf_name] = { | ||
652 | "FDT": 'fdt-' + real_dtb, | ||
653 | } | ||
654 | else: | ||
655 | req_sections[conf_name] = { | ||
656 | "Kernel": "kernel-1", | ||
657 | "FDT": 'fdt-' + real_dtb, | ||
658 | } | ||
659 | if initramfs_image and initramfs_image_bundle != "1": | ||
660 | req_sections[conf_name]['Init Ramdisk'] = "ramdisk-1" | ||
661 | else: | ||
662 | conf_name = bb_vars['FIT_CONF_PREFIX'] + '1' | ||
663 | req_sections[conf_name] = { | ||
664 | "Kernel": "kernel-1" | ||
665 | } | ||
666 | if initramfs_image and initramfs_image_bundle != "1": | ||
667 | req_sections[conf_name]['Init Ramdisk'] = "ramdisk-1" | ||
668 | |||
669 | # Add signing related properties if needed | ||
670 | if uboot_sign_enable == "1": | ||
671 | for section in req_sections: | ||
672 | req_sections[section]['Hash algo'] = fit_hash_alg | ||
673 | if section.startswith(bb_vars['FIT_CONF_PREFIX']): | ||
674 | req_sections[section]['Hash value'] = "unavailable" | ||
675 | req_sections[section]['Sign algo'] = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_keyname) | ||
676 | num_signatures += 1 | ||
677 | elif fit_sign_individual == "1": | ||
678 | req_sections[section]['Sign algo'] = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_img_keyname) | ||
679 | num_signatures += 1 | ||
680 | return (req_sections, num_signatures) | ||
681 | |||
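The dictionary built above is compared against what dumpimage -l reports for the FIT image. The parsing counterpart (_dump_fitimage) is outside this hunk; a sketch modeled on the parser in the superseded test code:

    import re
    import subprocess

    def dump_fitimage_sections(dumpimage_path, fitimage_path):
        # Parse "dumpimage -l <fitImage>" into {section: {key: value}}.
        output = subprocess.check_output([dumpimage_path, '-l', fitimage_path],
                                         universal_newlines=True)
        sections, in_section = {}, None
        for line in output.splitlines():
            if line.startswith((' Configuration', ' Image')):
                in_section = re.search(r'\((.*)\)', line).groups()[0]
            elif not line.strip():
                in_section = None
            elif in_section and ':' in line:
                key, value = line.split(':', 1)
                sections.setdefault(in_section, {})[key.strip()] = value.strip()
        return sections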
682 | def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path): | ||
683 | """Verify the signature nodes in the FIT image""" | ||
684 | if bb_vars['UBOOT_SIGN_ENABLE'] == "1": | ||
685 | self.logger.debug("Verifying signatures in the FIT image") | ||
686 | else: | ||
687 | self.logger.debug("FIT image is not signed. Signature verification is not needed.") | ||
688 | return | ||
689 | |||
690 | fit_hash_alg = bb_vars['FIT_HASH_ALG'] | ||
691 | fit_sign_alg = bb_vars['FIT_SIGN_ALG'] | ||
692 | uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME'] | ||
693 | uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME'] | ||
694 | deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE'] | ||
695 | kernel_deploysubdir = bb_vars['KERNEL_DEPLOYSUBDIR'] | ||
696 | fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL'] | ||
697 | fit_hash_alg_len = FitImageTestCase.MKIMAGE_HASH_LENGTHS[fit_hash_alg] | ||
698 | fit_sign_alg_len = FitImageTestCase.MKIMAGE_SIGNATURE_LENGTHS[fit_sign_alg] | ||
699 | for section, values in sections.items(): | ||
700 | # Configuration nodes are always signed with UBOOT_SIGN_KEYNAME (if UBOOT_SIGN_ENABLE = "1") | ||
701 | if section.startswith(bb_vars['FIT_CONF_PREFIX']): | ||
702 | sign_algo = values.get('Sign algo', None) | ||
703 | req_sign_algo = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_keyname) | ||
704 | self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section) | ||
705 | sign_value = values.get('Sign value', None) | ||
706 | self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section) | ||
707 | dtb_file_name = section.replace(bb_vars['FIT_CONF_PREFIX'], '') | ||
708 | dtb_path = os.path.join(deploy_dir_image, dtb_file_name) | ||
709 | if kernel_deploysubdir: | ||
710 | dtb_path = os.path.join(deploy_dir_image, kernel_deploysubdir, dtb_file_name) | ||
711 | # External devicetrees created by devicetree.bbclass are in a subfolder and have priority | ||
712 | dtb_path_ext = os.path.join(deploy_dir_image, "devicetree", dtb_file_name) | ||
713 | if os.path.exists(dtb_path_ext): | ||
714 | dtb_path = dtb_path_ext | ||
715 | self._verify_fit_image_signature(uboot_tools_bindir, fitimage_path, dtb_path, section) | ||
716 | else: | ||
717 | # Image nodes always need a hash, which is indirectly signed by the configuration signature | ||
718 | hash_algo = values.get('Hash algo', None) | ||
719 | self.assertEqual(hash_algo, fit_hash_alg) | ||
720 | hash_value = values.get('Hash value', None) | ||
721 | self.assertEqual(len(hash_value), fit_hash_alg_len, 'Hash value for section %s not expected length' % section) | ||
722 | # Optionally, if FIT_SIGN_INDIVIDUAL = "1", the image nodes also carry a signature (redundant but possible) | ||
723 | if fit_sign_individual == "1": | ||
724 | sign_algo = values.get('Sign algo', None) | ||
725 | req_sign_algo = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_img_keyname) | ||
726 | self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section) | ||
727 | sign_value = values.get('Sign value', None) | ||
728 | self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section) | ||
729 | |||
730 | # Search for the string passed to mkimage in each signed section of the FIT image. | ||
731 | # mkimage appears to support adding a comment but not reading it back. | ||
732 | a_comment = FitImageTestCase._get_uboot_mkimage_sign_args(bb_vars['UBOOT_MKIMAGE_SIGN_ARGS']) | ||
733 | self.logger.debug("a_comment: %s" % a_comment) | ||
734 | if a_comment: | ||
735 | found_comments = FitImageTestCase._find_string_in_bin_file(fitimage_path, a_comment) | ||
736 | self.assertEqual(found_comments, num_signatures, "Expected %d signed and commented (%s) sections in the fitImage." % | ||
737 | (num_signatures, a_comment)) | ||
738 | |||
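_find_string_in_bin_file is defined outside this hunk; a plausible implementation simply counts the occurrences of the comment bytes in the image, for example:

    def find_string_in_bin_file(file_path, search_string):
        # Count non-overlapping occurrences of the string in a binary file.
        # Hypothetical stand-in for the suite's _find_string_in_bin_file.
        with open(file_path, 'rb') as f:
            return f.read().count(search_string.encode())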
739 | class KernelFitImageRecipeTests(KernelFitImageBase): | ||
740 | """Test cases for the kernel-fitimage bbclass""" | ||
13 | 741 | ||
14 | def test_fit_image(self): | 742 | def test_fit_image(self): |
15 | """ | 743 | """ |
@@ -25,10 +753,7 @@ class FitImageTests(OESelftestTestCase): | |||
25 | Author: Usama Arif <usama.arif@arm.com> | 753 | Author: Usama Arif <usama.arif@arm.com> |
26 | """ | 754 | """ |
27 | config = """ | 755 | config = """ |
28 | # Enable creation of fitImage | ||
29 | KERNEL_IMAGETYPE = "Image" | 756 | KERNEL_IMAGETYPE = "Image" |
30 | KERNEL_IMAGETYPES += " fitImage " | ||
31 | KERNEL_CLASSES = " kernel-fitimage " | ||
32 | 757 | ||
33 | # RAM disk variables including load address and entrypoint for kernel and RAM disk | 758 | # RAM disk variables including load address and entrypoint for kernel and RAM disk |
34 | IMAGE_FSTYPES += "cpio.gz" | 759 | IMAGE_FSTYPES += "cpio.gz" |
@@ -40,79 +765,145 @@ UBOOT_RD_ENTRYPOINT = "0x88000000" | |||
40 | UBOOT_LOADADDRESS = "0x80080000" | 765 | UBOOT_LOADADDRESS = "0x80080000" |
41 | UBOOT_ENTRYPOINT = "0x80080000" | 766 | UBOOT_ENTRYPOINT = "0x80080000" |
42 | FIT_DESC = "A model description" | 767 | FIT_DESC = "A model description" |
768 | FIT_CONF_PREFIX = "foo-" | ||
43 | """ | 769 | """ |
770 | config = self._config_add_kernel_classes(config) | ||
44 | self.write_config(config) | 771 | self.write_config(config) |
772 | bb_vars = self._fit_get_bb_vars() | ||
773 | self._test_fitimage(bb_vars) | ||
45 | 774 | ||
46 | # fitImage is created as part of linux recipe | 775 | def test_get_compatible_from_dtb(self): |
47 | image = "virtual/kernel" | 776 | """Test the oe.fitimage.get_compatible_from_dtb function |
48 | bitbake(image) | ||
49 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image) | ||
50 | |||
51 | fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], | ||
52 | "fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME'])) | ||
53 | fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], | ||
54 | "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME'])) | ||
55 | |||
56 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
57 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
58 | self.assertTrue(os.path.exists(fitimage_path), | ||
59 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
60 | |||
61 | # Check that the type, load address, entrypoint address and default | ||
62 | # values for kernel and ramdisk in Image Tree Source are as expected. | ||
63 | # The order of fields in the below array is important. Not all the | ||
64 | # fields are tested, only the key fields that won't vary between | ||
65 | # different architectures. | ||
66 | its_field_check = [ | ||
67 | 'description = "A model description";', | ||
68 | 'type = "kernel";', | ||
69 | 'load = <0x80080000>;', | ||
70 | 'entry = <0x80080000>;', | ||
71 | 'type = "ramdisk";', | ||
72 | 'load = <0x88000000>;', | ||
73 | 'entry = <0x88000000>;', | ||
74 | 'default = "conf-1";', | ||
75 | 'kernel = "kernel-1";', | ||
76 | 'ramdisk = "ramdisk-1";' | ||
77 | ] | ||
78 | 777 | ||
79 | with open(fitimage_its_path) as its_file: | 778 | 1. bitbake bbb-dtbs-as-ext |
80 | field_index = 0 | 779 | 2. Check if symlink_points_below returns the path to the DTB |
81 | for line in its_file: | 780 | 3. Check if the expected compatible string is found by get_compatible_from_dtb() |
82 | if field_index == len(its_field_check): | 781 | """ |
83 | break | 782 | DTB_RECIPE = "bbb-dtbs-as-ext" |
84 | if its_field_check[field_index] in line: | 783 | DTB_FILE = "am335x-bonegreen-ext.dtb" |
85 | field_index +=1 | 784 | DTB_SYMLINK = "am335x-bonegreen-ext-alias.dtb" |
785 | DTBO_FILE = "BBORG_RELAY-00A2.dtbo" | ||
786 | EXPECTED_COMP = ["ti,am335x-bone-green", "ti,am335x-bone-black", "ti,am335x-bone", "ti,am33xx"] | ||
86 | 787 | ||
87 | if field_index != len(its_field_check): # if its equal, the test passed | 788 | config = """ |
88 | self.assertTrue(field_index == len(its_field_check), | 789 | DISTRO = "poky" |
89 | "Fields in Image Tree Source File %s did not match, error in finding %s" | 790 | MACHINE = "beaglebone-yocto" |
90 | % (fitimage_its_path, its_field_check[field_index])) | 791 | """ |
792 | self.write_config(config) | ||
793 | |||
794 | # Provide the fdtget command called by get_compatible_from_dtb | ||
795 | dtc_bindir = FitImageTestCase._setup_native('dtc-native') | ||
796 | fdtget_path = os.path.join(dtc_bindir, "fdtget") | ||
797 | self.assertExists(fdtget_path) | ||
798 | |||
799 | # bitbake an external DTB with a symlink to it and a DTB overlay | ||
800 | bitbake(DTB_RECIPE) | ||
801 | deploy_dir_image = get_bb_var("DEPLOY_DIR_IMAGE", DTB_RECIPE) | ||
802 | devicetree_dir = os.path.join(deploy_dir_image, "devicetree") | ||
803 | dtb_path = os.path.join(devicetree_dir, DTB_FILE) | ||
804 | dtb_alias_path = os.path.join(devicetree_dir, DTB_SYMLINK) | ||
805 | dtbo_file = os.path.join(devicetree_dir, DTBO_FILE) | ||
806 | self.assertExists(dtb_path) | ||
807 | self.assertExists(dtb_alias_path) | ||
808 | self.assertExists(dtbo_file) | ||
809 | |||
810 | # Test symlink_points_below | ||
811 | linked_dtb = oe.fitimage.symlink_points_below(dtb_alias_path, devicetree_dir) | ||
812 | self.assertEqual(linked_dtb, DTB_FILE) | ||
813 | |||
814 | # Check if get_compatible_from_dtb finds the expected compatible string in the DTBs | ||
815 | comp = oe.fitimage.get_compatible_from_dtb(dtb_path, fdtget_path) | ||
816 | self.assertEqual(comp, EXPECTED_COMP) | ||
817 | comp_alias = oe.fitimage.get_compatible_from_dtb(dtb_alias_path, fdtget_path) | ||
818 | self.assertEqual(comp_alias, EXPECTED_COMP) | ||
819 | # The alias is a symlink, therefore the compatible string is equal | ||
820 | self.assertEqual(comp_alias, comp) | ||
821 | |||
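oe.fitimage.get_compatible_from_dtb itself lives in meta/lib/oe/fitimage.py and is not part of this hunk. Conceptually it reads the compatible property of the root node via fdtget, roughly as in this sketch (an assumption about the implementation, not a copy of it):

    import subprocess

    def get_compatible_sketch(dtb_path, fdtget_path):
        # "fdtget -t s <dtb> / compatible" prints the root node's compatible
        # strings, whitespace separated.
        output = subprocess.check_output(
            [fdtget_path, '-t', 's', dtb_path, '/', 'compatible'],
            universal_newlines=True)
        return output.split()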
822 | def test_fit_image_ext_dtb_dtbo(self): | ||
823 | """ | ||
824 | Summary: Check if FIT image and Image Tree Source (its) are created correctly. | ||
825 | Expected: 1) its and FIT image are built successfully | ||
826 | 2) The its file contains also the external devicetree overlay | ||
827 | 3) Dumping the FIT image indicates the devicetree overlay | ||
828 | """ | ||
829 | config = """ | ||
830 | # Enable creation of fitImage | ||
831 | MACHINE = "beaglebone-yocto" | ||
832 | # Add a devicetree overlay which does not need kernel sources | ||
833 | PREFERRED_PROVIDER_virtual/dtb = "bbb-dtbs-as-ext" | ||
834 | """ | ||
835 | config = self._config_add_kernel_classes(config) | ||
836 | config = self._config_add_uboot_env(config) | ||
837 | self.write_config(config) | ||
838 | bb_vars = self._fit_get_bb_vars() | ||
839 | self._test_fitimage(bb_vars) | ||
840 | |||
841 | |||
842 | def test_sign_fit_image_configurations(self): | ||
843 | """ | ||
844 | Summary: Check if FIT image and Image Tree Source (its) are created | ||
845 | and the configuration nodes are signed correctly. | ||
846 | Expected: 1) its and FIT image are built successfully | ||
847 | 2) Scanning the its file indicates signing is enabled | ||
848 | as requested by UBOOT_SIGN_ENABLE | ||
849 | 3) Dumping the FIT image indicates signature values | ||
850 | are present (only for the configuration nodes as | ||
851 | FIT_SIGN_INDIVIDUAL is disabled) | ||
852 | 4) Verify the FIT image contains the comments passed via | ||
853 | UBOOT_MKIMAGE_SIGN_ARGS once per configuration node. | ||
854 | """ | ||
855 | # Generate a configuration section which gets included into the local.conf file | ||
856 | config = """ | ||
857 | # Enable creation of fitImage | ||
858 | MACHINE = "beaglebone-yocto" | ||
859 | UBOOT_SIGN_ENABLE = "1" | ||
860 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
861 | UBOOT_SIGN_KEYNAME = "dev" | ||
862 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | ||
863 | FIT_CONF_DEFAULT_DTB = "am335x-bonegreen.dtb" | ||
864 | """ | ||
865 | config = self._config_add_kernel_classes(config) | ||
866 | config = self._config_add_uboot_env(config) | ||
867 | self.write_config(config) | ||
868 | |||
869 | # Retrieve some variables from bitbake | ||
870 | bb_vars = self._fit_get_bb_vars([ | ||
871 | 'FIT_KEY_GENRSA_ARGS', | ||
872 | 'FIT_KEY_REQ_ARGS', | ||
873 | 'FIT_KEY_SIGN_PKCS', | ||
874 | 'FIT_SIGN_NUMBITS', | ||
875 | 'UBOOT_SIGN_KEYDIR', | ||
876 | ]) | ||
91 | 877 | ||
878 | self._gen_signing_key(bb_vars) | ||
879 | self._test_fitimage(bb_vars) | ||
92 | 880 | ||
93 | def test_sign_fit_image(self): | 881 | def test_sign_fit_image_individual(self): |
94 | """ | 882 | """ |
95 | Summary: Check if FIT image and Image Tree Source (its) are created | 883 | Summary: Check if FIT image and Image Tree Source (its) are created |
96 | and signed correctly. | 884 | and all nodes are signed correctly. |
97 | Expected: 1) its and FIT image are built successfully | 885 | Expected: 1) its and FIT image are built successfully |
98 | 2) Scanning the its file indicates signing is enabled | 886 | 2) Scanning the its file indicates signing is enabled |
99 | as requested by UBOOT_SIGN_ENABLE (using keys generated | 887 | as requested by UBOOT_SIGN_ENABLE |
100 | via FIT_GENERATE_KEYS) | ||
101 | 3) Dumping the FIT image indicates signature values | 888 | 3) Dumping the FIT image indicates signature values |
102 | are present (including for images as enabled via | 889 | are present (including for images as enabled via |
103 | FIT_SIGN_INDIVIDUAL) | 890 | FIT_SIGN_INDIVIDUAL) |
104 | 4) Examination of the do_assemble_fitimage runfile/logfile | 891 | This also implies that FIT_GENERATE_KEYS = "1" works. |
105 | indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and | 892 | 4) Verify the FIT image contains the comments passed via |
106 | UBOOT_MKIMAGE_SIGN_ARGS are working as expected. | 893 | UBOOT_MKIMAGE_SIGN_ARGS once per image and per |
894 | configuration node. | ||
895 | Note: This test is mostly for backward compatibility. | ||
896 | The recommended approach is to sign the configuration nodes | ||
897 | which also include the hashes of all the images. Signing | ||
898 | all the images individually is therefore redundant. | ||
107 | Product: oe-core | 899 | Product: oe-core |
108 | Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon | 900 | Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon |
109 | work by Usama Arif <usama.arif@arm.com> | 901 | work by Usama Arif <usama.arif@arm.com> |
110 | """ | 902 | """ |
903 | # Generate a configuration section which gets included into the local.conf file | ||
111 | config = """ | 904 | config = """ |
112 | # Enable creation of fitImage | 905 | # Enable creation of fitImage |
113 | MACHINE = "beaglebone-yocto" | 906 | MACHINE = "beaglebone-yocto" |
114 | KERNEL_IMAGETYPES += " fitImage " | ||
115 | KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper " | ||
116 | UBOOT_SIGN_ENABLE = "1" | 907 | UBOOT_SIGN_ENABLE = "1" |
117 | FIT_GENERATE_KEYS = "1" | 908 | FIT_GENERATE_KEYS = "1" |
118 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | 909 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" |
@@ -121,211 +912,494 @@ UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | |||
121 | FIT_SIGN_INDIVIDUAL = "1" | 912 | FIT_SIGN_INDIVIDUAL = "1" |
122 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" | 913 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" |
123 | """ | 914 | """ |
915 | config = self._config_add_kernel_classes(config) | ||
916 | config = self._config_add_uboot_env(config) | ||
124 | self.write_config(config) | 917 | self.write_config(config) |
918 | bb_vars = self._fit_get_bb_vars() | ||
125 | 919 | ||
126 | # fitImage is created as part of linux recipe | 920 | # Ensure new keys are generated and FIT_GENERATE_KEYS = "1" is tested |
127 | image = "virtual/kernel" | 921 | bitbake("kernel-signing-keys-native -c compile -f") |
128 | bitbake(image) | ||
129 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image) | ||
130 | |||
131 | fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], | ||
132 | "fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME'])) | ||
133 | fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], | ||
134 | "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME'])) | ||
135 | |||
136 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
137 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
138 | self.assertTrue(os.path.exists(fitimage_path), | ||
139 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
140 | |||
141 | req_itspaths = [ | ||
142 | ['/', 'images', 'kernel-1'], | ||
143 | ['/', 'images', 'kernel-1', 'signature-1'], | ||
144 | ['/', 'images', 'fdt-am335x-boneblack.dtb'], | ||
145 | ['/', 'images', 'fdt-am335x-boneblack.dtb', 'signature-1'], | ||
146 | ['/', 'configurations', 'conf-am335x-boneblack.dtb'], | ||
147 | ['/', 'configurations', 'conf-am335x-boneblack.dtb', 'signature-1'], | ||
148 | ] | ||
149 | 922 | ||
150 | itspath = [] | 923 | self._test_fitimage(bb_vars) |
151 | itspaths = [] | ||
152 | linect = 0 | ||
153 | sigs = {} | ||
154 | with open(fitimage_its_path) as its_file: | ||
155 | linect += 1 | ||
156 | for line in its_file: | ||
157 | line = line.strip() | ||
158 | if line.endswith('};'): | ||
159 | itspath.pop() | ||
160 | elif line.endswith('{'): | ||
161 | itspath.append(line[:-1].strip()) | ||
162 | itspaths.append(itspath[:]) | ||
163 | elif itspath and itspath[-1] == 'signature-1': | ||
164 | itsdotpath = '.'.join(itspath) | ||
165 | if not itsdotpath in sigs: | ||
166 | sigs[itsdotpath] = {} | ||
167 | if not '=' in line or not line.endswith(';'): | ||
168 | self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line)) | ||
169 | key, value = line.split('=', 1) | ||
170 | sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';') | ||
171 | 924 | ||
172 | for reqpath in req_itspaths: | 925 | def test_fit_image_sign_initramfs(self): |
173 | if not reqpath in itspaths: | ||
174 | self.fail('Missing section in its file: %s' % reqpath) | ||
175 | |||
176 | reqsigvalues_image = { | ||
177 | 'algo': '"sha256,rsa2048"', | ||
178 | 'key-name-hint': '"img-oe-selftest"', | ||
179 | } | ||
180 | reqsigvalues_config = { | ||
181 | 'algo': '"sha256,rsa2048"', | ||
182 | 'key-name-hint': '"cfg-oe-selftest"', | ||
183 | 'sign-images': '"kernel", "fdt"', | ||
184 | } | ||
185 | |||
186 | for itspath, values in sigs.items(): | ||
187 | if 'conf-' in itspath: | ||
188 | reqsigvalues = reqsigvalues_config | ||
189 | else: | ||
190 | reqsigvalues = reqsigvalues_image | ||
191 | for reqkey, reqvalue in reqsigvalues.items(): | ||
192 | value = values.get(reqkey, None) | ||
193 | if value is None: | ||
194 | self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath)) | ||
195 | self.assertEqual(value, reqvalue) | ||
196 | |||
197 | # Dump the image to see if it really got signed | ||
198 | bitbake("u-boot-tools-native -c addto_recipe_sysroot") | ||
199 | result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') | ||
200 | recipe_sysroot_native = result.output.split('=')[1].strip('"') | ||
201 | dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage') | ||
202 | result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) | ||
203 | in_signed = None | ||
204 | signed_sections = {} | ||
205 | for line in result.output.splitlines(): | ||
206 | if line.startswith((' Configuration', ' Image')): | ||
207 | in_signed = re.search(r'\((.*)\)', line).groups()[0] | ||
208 | elif re.match('^ *', line) in (' ', ''): | ||
209 | in_signed = None | ||
210 | elif in_signed: | ||
211 | if not in_signed in signed_sections: | ||
212 | signed_sections[in_signed] = {} | ||
213 | key, value = line.split(':', 1) | ||
214 | signed_sections[in_signed][key.strip()] = value.strip() | ||
215 | self.assertIn('kernel-1', signed_sections) | ||
216 | self.assertIn('fdt-am335x-boneblack.dtb', signed_sections) | ||
217 | self.assertIn('conf-am335x-boneblack.dtb', signed_sections) | ||
218 | for signed_section, values in signed_sections.items(): | ||
219 | value = values.get('Sign algo', None) | ||
220 | if signed_section.startswith("conf"): | ||
221 | self.assertEqual(value, 'sha256,rsa2048:cfg-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
222 | else: | ||
223 | self.assertEqual(value, 'sha256,rsa2048:img-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
224 | value = values.get('Sign value', None) | ||
225 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) | ||
226 | |||
227 | # Check for UBOOT_MKIMAGE_SIGN_ARGS | ||
228 | result = runCmd('bitbake -e virtual/kernel | grep ^T=') | ||
229 | tempdir = result.output.split('=', 1)[1].strip().strip('') | ||
230 | result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True) | ||
231 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used') | ||
232 | |||
233 | # Check for evidence of test-mkimage-wrapper class | ||
234 | result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) | ||
235 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') | ||
236 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) | ||
237 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') | ||
238 | |||
239 | def test_uboot_fit_image(self): | ||
240 | """ | 926 | """ |
241 | Summary: Check if Uboot FIT image and Image Tree Source | 927 | Summary: Verifies the content of the initramfs node in the FIT Image Tree Source (its) |
242 | (its) are built and the Image Tree Source has the | 928 | The FIT settings are set by the test case. |
243 | correct fields. | 929 | The machine used is beaglebone-yocto. |
244 | Expected: 1. u-boot-fitImage and u-boot-its can be built | 930 | Expected: 1. The ITS is generated with initramfs support |
245 | 2. The type, load address, entrypoint address and | 931 | 2. All the fields in the kernel node are as expected (matching the |
246 | default values of U-boot image are correct in the | 932 | conf settings) |
247 | Image Tree Source. Not all the fields are tested, | 933 | 3. The kernel is included in all the available configurations and |
248 | only the key fields that wont vary between | 934 | its hash is included in the configuration signature |
249 | different architectures. | 935 | |
250 | Product: oe-core | 936 | Product: oe-core |
251 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> | 937 | Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com> |
252 | based on work by Usama Arif <usama.arif@arm.com> | ||
253 | """ | 938 | """ |
939 | |||
254 | config = """ | 940 | config = """ |
255 | # We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | 941 | DISTRO = "poky" |
256 | MACHINE = "qemuarm" | 942 | MACHINE = "beaglebone-yocto" |
257 | UBOOT_MACHINE = "am57xx_evm_defconfig" | 943 | INITRAMFS_IMAGE = "core-image-minimal-initramfs" |
258 | SPL_BINARY = "MLO" | 944 | INITRAMFS_SCRIPTS = "" |
945 | UBOOT_MACHINE = "am335x_evm_defconfig" | ||
946 | UBOOT_SIGN_ENABLE = "1" | ||
947 | UBOOT_SIGN_KEYNAME = "beaglebonekey" | ||
948 | UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}" | ||
949 | UBOOT_DTB_BINARY = "u-boot.dtb" | ||
950 | UBOOT_ENTRYPOINT = "0x80000000" | ||
951 | UBOOT_LOADADDRESS = "0x80000000" | ||
952 | UBOOT_RD_LOADADDRESS = "0x88000000" | ||
953 | UBOOT_RD_ENTRYPOINT = "0x88000000" | ||
954 | UBOOT_DTB_LOADADDRESS = "0x82000000" | ||
955 | UBOOT_ARCH = "arm" | ||
956 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | ||
957 | UBOOT_MKIMAGE_KERNEL_TYPE = "kernel" | ||
958 | UBOOT_EXTLINUX = "0" | ||
959 | KERNEL_IMAGETYPE_REPLACEMENT = "zImage" | ||
960 | FIT_KERNEL_COMP_ALG = "none" | ||
961 | FIT_HASH_ALG = "sha256" | ||
962 | """ | ||
963 | config = self._config_add_kernel_classes(config) | ||
964 | config = self._config_add_uboot_env(config) | ||
965 | self.write_config(config) | ||
259 | 966 | ||
260 | # Enable creation of the U-Boot fitImage | 967 | # Retrieve some variables from bitbake |
261 | UBOOT_FITIMAGE_ENABLE = "1" | 968 | bb_vars = self._fit_get_bb_vars([ |
969 | 'FIT_KEY_GENRSA_ARGS', | ||
970 | 'FIT_KEY_REQ_ARGS', | ||
971 | 'FIT_KEY_SIGN_PKCS', | ||
972 | 'FIT_SIGN_NUMBITS', | ||
973 | 'UBOOT_SIGN_KEYDIR', | ||
974 | ]) | ||
262 | 975 | ||
263 | # (U-boot) fitImage properties | 976 | self._gen_signing_key(bb_vars) |
264 | UBOOT_LOADADDRESS = "0x80080000" | 977 | self._test_fitimage(bb_vars) |
265 | UBOOT_ENTRYPOINT = "0x80080000" | 978 | |
266 | UBOOT_FIT_DESC = "A model description" | 979 | def test_fit_image_sign_initramfs_bundle(self): |
980 | """ | ||
981 | Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) | ||
982 | The FIT settings are set by the test case. | ||
983 | The machine used is beaglebone-yocto. | ||
984 | Expected: 1. The ITS is generated with initramfs bundle support | ||
985 | 2. All the fields in the kernel node are as expected (matching the | ||
986 | conf settings) | ||
987 | 3. The kernel is included in all the available configurations and | ||
988 | its hash is included in the configuration signature | ||
989 | |||
990 | Product: oe-core | ||
991 | Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com> | ||
992 | """ | ||
267 | 993 | ||
268 | # Enable creation of Kernel fitImage | 994 | config = """ |
269 | KERNEL_IMAGETYPES += " fitImage " | 995 | DISTRO = "poky" |
270 | KERNEL_CLASSES = " kernel-fitimage" | 996 | MACHINE = "beaglebone-yocto" |
997 | INITRAMFS_IMAGE_BUNDLE = "1" | ||
998 | INITRAMFS_IMAGE = "core-image-minimal-initramfs" | ||
999 | INITRAMFS_SCRIPTS = "" | ||
1000 | UBOOT_MACHINE = "am335x_evm_defconfig" | ||
271 | UBOOT_SIGN_ENABLE = "1" | 1001 | UBOOT_SIGN_ENABLE = "1" |
272 | FIT_GENERATE_KEYS = "1" | 1002 | UBOOT_SIGN_KEYNAME = "beaglebonekey" |
273 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | 1003 | UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}" |
274 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" | 1004 | UBOOT_DTB_BINARY = "u-boot.dtb" |
275 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | 1005 | UBOOT_ENTRYPOINT = "0x80000000" |
276 | FIT_SIGN_INDIVIDUAL = "1" | 1006 | UBOOT_LOADADDRESS = "0x80000000" |
1007 | UBOOT_DTB_LOADADDRESS = "0x82000000" | ||
1008 | UBOOT_ARCH = "arm" | ||
1009 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | ||
1010 | UBOOT_MKIMAGE_KERNEL_TYPE = "kernel" | ||
1011 | UBOOT_EXTLINUX = "0" | ||
1012 | KERNEL_IMAGETYPE_REPLACEMENT = "zImage" | ||
1013 | FIT_KERNEL_COMP_ALG = "none" | ||
1014 | FIT_HASH_ALG = "sha256" | ||
277 | """ | 1015 | """ |
1016 | config = self._config_add_kernel_classes(config) | ||
1017 | config = self._config_add_uboot_env(config) | ||
278 | self.write_config(config) | 1018 | self.write_config(config) |
1019 | bb_vars = self._fit_get_bb_vars() | ||
1020 | self._gen_signing_key(bb_vars) | ||
1021 | self._test_fitimage(bb_vars) | ||
1022 | |||
1023 | class FitImagePyTests(KernelFitImageBase): | ||
1024 | """Test cases for the fitimage.py module without calling bitbake""" | ||
1025 | |||
1026 | def _test_fitimage_py(self, bb_vars_overrides=None): | ||
1027 | topdir = os.path.join(os.environ['BUILDDIR']) | ||
1028 | fitimage_its_path = os.path.join(topdir, self._testMethodName + '.its') | ||
1029 | |||
1030 | # Provide variables without calling bitbake | ||
1031 | bb_vars = { | ||
1032 | # image-fitimage.conf | ||
1033 | 'FIT_DESC': "Kernel fitImage for a dummy distro", | ||
1034 | 'FIT_HASH_ALG': "sha256", | ||
1035 | 'FIT_SIGN_ALG': "rsa2048", | ||
1036 | 'FIT_PAD_ALG': "pkcs-1.5", | ||
1037 | 'FIT_GENERATE_KEYS': "0", | ||
1038 | 'FIT_SIGN_NUMBITS': "2048", | ||
1039 | 'FIT_KEY_GENRSA_ARGS': "-F4", | ||
1040 | 'FIT_KEY_REQ_ARGS': "-batch -new", | ||
1041 | 'FIT_KEY_SIGN_PKCS': "-x509", | ||
1042 | 'FIT_SIGN_INDIVIDUAL': "0", | ||
1043 | 'FIT_CONF_PREFIX': "conf-", | ||
1044 | 'FIT_SUPPORTED_INITRAMFS_FSTYPES': "cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst cpio.gz ext2.gz cpio", | ||
1045 | 'FIT_CONF_DEFAULT_DTB': "", | ||
1046 | 'FIT_ADDRESS_CELLS': "1", | ||
1047 | 'FIT_UBOOT_ENV': "", | ||
1048 | # kernel.bbclass | ||
1049 | 'UBOOT_ENTRYPOINT': "0x20008000", | ||
1050 | 'UBOOT_LOADADDRESS': "0x20008000", | ||
1051 | 'INITRAMFS_IMAGE': "", | ||
1052 | 'INITRAMFS_IMAGE_BUNDLE': "", | ||
1053 | # kernel-uboot.bbclass | ||
1054 | 'FIT_KERNEL_COMP_ALG': "gzip", | ||
1055 | 'FIT_KERNEL_COMP_ALG_EXTENSION': ".gz", | ||
1056 | 'UBOOT_MKIMAGE_KERNEL_TYPE': "kernel", | ||
1057 | # uboot-config.bbclass | ||
1058 | 'UBOOT_MKIMAGE_DTCOPTS': "", | ||
1059 | 'UBOOT_MKIMAGE': "uboot-mkimage", | ||
1060 | 'UBOOT_MKIMAGE_SIGN': "uboot-mkimage", | ||
1061 | 'UBOOT_MKIMAGE_SIGN_ARGS': "", | ||
1062 | 'UBOOT_SIGN_ENABLE': "0", | ||
1063 | 'UBOOT_SIGN_KEYDIR': None, | ||
1064 | 'UBOOT_SIGN_KEYNAME': None, | ||
1065 | 'UBOOT_SIGN_IMG_KEYNAME': None, | ||
1066 | # others | ||
1067 | 'MACHINE': "qemux86-64", | ||
1068 | 'UBOOT_ARCH': "x86", | ||
1069 | 'HOST_PREFIX': "x86_64-poky-linux-" | ||
1070 | } | ||
1071 | if bb_vars_overrides: | ||
1072 | bb_vars.update(bb_vars_overrides) | ||
1073 | |||
1074 | root_node = oe.fitimage.ItsNodeRootKernel( | ||
1075 | bb_vars["FIT_DESC"], bb_vars["FIT_ADDRESS_CELLS"], | ||
1076 | bb_vars['HOST_PREFIX'], bb_vars['UBOOT_ARCH'], bb_vars["FIT_CONF_PREFIX"], | ||
1077 | oe.types.boolean(bb_vars['UBOOT_SIGN_ENABLE']), bb_vars["UBOOT_SIGN_KEYDIR"], | ||
1078 | bb_vars["UBOOT_MKIMAGE"], bb_vars["UBOOT_MKIMAGE_DTCOPTS"], | ||
1079 | bb_vars["UBOOT_MKIMAGE_SIGN"], bb_vars["UBOOT_MKIMAGE_SIGN_ARGS"], | ||
1080 | bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG'], bb_vars['FIT_PAD_ALG'], | ||
1081 | bb_vars['UBOOT_SIGN_KEYNAME'], | ||
1082 | oe.types.boolean(bb_vars['FIT_SIGN_INDIVIDUAL']), bb_vars['UBOOT_SIGN_IMG_KEYNAME'] | ||
1083 | ) | ||
1084 | |||
1085 | root_node.fitimage_emit_section_kernel("kernel-1", "linux.bin", "none", | ||
1086 | bb_vars.get('UBOOT_LOADADDRESS'), bb_vars.get('UBOOT_ENTRYPOINT'), | ||
1087 | bb_vars.get('UBOOT_MKIMAGE_KERNEL_TYPE'), bb_vars.get("UBOOT_ENTRYSYMBOL") | ||
1088 | ) | ||
1089 | |||
1090 | dtb_files, _ = FitImageTestCase._get_dtb_files(bb_vars) | ||
1091 | for dtb in dtb_files: | ||
1092 | root_node.fitimage_emit_section_dtb(dtb, os.path.join("a-dir", dtb), | ||
1093 | bb_vars.get("UBOOT_DTB_LOADADDRESS"), bb_vars.get("UBOOT_DTBO_LOADADDRESS")) | ||
1094 | |||
1095 | if bb_vars.get('FIT_UBOOT_ENV'): | ||
1096 | root_node.fitimage_emit_section_boot_script( | ||
1097 | "bootscr-" + bb_vars['FIT_UBOOT_ENV'], bb_vars['FIT_UBOOT_ENV']) | ||
1098 | |||
1099 | if bb_vars['MACHINE'] == "qemux86-64": # Not really the right condition to check | ||
1100 | root_node.fitimage_emit_section_setup("setup-1", "setup1.bin") | ||
1101 | |||
1102 | if bb_vars.get('INITRAMFS_IMAGE') and bb_vars.get("INITRAMFS_IMAGE_BUNDLE") != "1": | ||
1103 | root_node.fitimage_emit_section_ramdisk("ramdisk-1", "a-dir/a-initramfs-1", | ||
1104 | "core-image-minimal-initramfs", | ||
1105 | bb_vars.get("UBOOT_RD_LOADADDRESS"), bb_vars.get("UBOOT_RD_ENTRYPOINT")) | ||
1106 | |||
1107 | root_node.fitimage_emit_section_config(bb_vars['FIT_CONF_DEFAULT_DTB']) | ||
1108 | root_node.write_its_file(fitimage_its_path) | ||
1109 | |||
1110 | self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
1111 | self.logger.debug("Checking its: %s" % fitimage_its_path) | ||
1112 | self._check_its_file(bb_vars, fitimage_its_path) | ||
1113 | |||
1114 | def test_fitimage_py_default(self): | ||
1115 | self._test_fitimage_py() | ||
1116 | |||
1117 | def test_fitimage_py_default_dtb(self): | ||
1118 | bb_vars_overrides = { | ||
1119 | 'KERNEL_DEVICETREE': "one.dtb two.dtb three.dtb", | ||
1120 | 'FIT_CONF_DEFAULT_DTB': "two.dtb" | ||
1121 | } | ||
1122 | self._test_fitimage_py(bb_vars_overrides) | ||
1123 | |||
1124 | |||
1125 | class UBootFitImageTests(FitImageTestCase): | ||
1126 | """Test cases for the uboot-sign bbclass""" | ||
279 | 1127 | ||
280 | # The U-Boot fitImage is created as part of the U-Boot recipe | 1128 | BOOTLOADER_RECIPE = "virtual/bootloader" |
281 | bitbake("virtual/bootloader") | 1129 | |
282 | 1130 | def _fit_get_bb_vars(self, additional_vars=[]): | |
283 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 1131 | """Get bb_vars as needed by _test_sign_fit_image |
284 | machine = get_bb_var('MACHINE') | 1132 | |
285 | fitimage_its_path = os.path.join(deploy_dir_image, | 1133 | Call the get_bb_vars function once and get all variables needed by the test case. |
286 | "u-boot-its-%s" % (machine,)) | 1134 | """ |
287 | fitimage_path = os.path.join(deploy_dir_image, | 1135 | internal_used = { |
288 | "u-boot-fitImage-%s" % (machine,)) | 1136 | 'DEPLOY_DIR_IMAGE', |
289 | 1137 | 'FIT_HASH_ALG', | |
290 | self.assertTrue(os.path.exists(fitimage_its_path), | 1138 | 'FIT_KEY_GENRSA_ARGS', |
291 | "%s image tree source doesn't exist" % (fitimage_its_path)) | 1139 | 'FIT_KEY_REQ_ARGS', |
292 | self.assertTrue(os.path.exists(fitimage_path), | 1140 | 'FIT_KEY_SIGN_PKCS', |
293 | "%s FIT image doesn't exist" % (fitimage_path)) | 1141 | 'FIT_SIGN_ALG', |
294 | 1142 | 'FIT_SIGN_INDIVIDUAL', | |
295 | # Check that the type, load address, entrypoint address and default | 1143 | 'FIT_SIGN_NUMBITS', |
296 | # values for kernel and ramdisk in Image Tree Source are as expected. | 1144 | 'MACHINE', |
297 | # The order of fields in the below array is important. Not all the | 1145 | 'SPL_MKIMAGE_SIGN_ARGS', |
298 | # fields are tested, only the key fields that wont vary between | 1146 | 'SPL_SIGN_ENABLE', |
299 | # different architectures. | 1147 | 'SPL_SIGN_KEYNAME', |
1148 | 'UBOOT_ARCH', | ||
1149 | 'UBOOT_DTB_BINARY', | ||
1150 | 'UBOOT_DTB_IMAGE', | ||
1151 | 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT', | ||
1152 | 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS', | ||
1153 | 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE', | ||
1154 | 'UBOOT_FIT_CONF_USER_LOADABLES', | ||
1155 | 'UBOOT_FIT_DESC', | ||
1156 | 'UBOOT_FIT_HASH_ALG', | ||
1157 | 'UBOOT_FIT_SIGN_ALG', | ||
1158 | 'UBOOT_FIT_TEE_ENTRYPOINT', | ||
1159 | 'UBOOT_FIT_TEE_LOADADDRESS', | ||
1160 | 'UBOOT_FIT_TEE', | ||
1161 | 'UBOOT_FIT_UBOOT_ENTRYPOINT', | ||
1162 | 'UBOOT_FIT_UBOOT_LOADADDRESS', | ||
1163 | 'UBOOT_FIT_USER_SETTINGS', | ||
1164 | 'UBOOT_FITIMAGE_ENABLE', | ||
1165 | 'UBOOT_NODTB_BINARY', | ||
1166 | 'UBOOT_SIGN_ENABLE', | ||
1167 | 'UBOOT_SIGN_IMG_KEYNAME', | ||
1168 | 'UBOOT_SIGN_KEYDIR', | ||
1169 | 'UBOOT_SIGN_KEYNAME', | ||
1170 | } | ||
1171 | bb_vars = get_bb_vars(list(internal_used | set(additional_vars)), UBootFitImageTests.BOOTLOADER_RECIPE) | ||
1172 | self.logger.debug("bb_vars: %s" % pprint.pformat(bb_vars, indent=4)) | ||
1173 | return bb_vars | ||
1174 | |||
1175 | def _bitbake_fit_image(self, bb_vars): | ||
1176 | """Bitbake the bootloader and return the paths to the its file and the FIT image""" | ||
1177 | bitbake(UBootFitImageTests.BOOTLOADER_RECIPE) | ||
1178 | |||
1179 | deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE'] | ||
1180 | machine = bb_vars['MACHINE'] | ||
1181 | fitimage_its_path = os.path.join(deploy_dir_image, "u-boot-its-%s" % machine) | ||
1182 | fitimage_path = os.path.join(deploy_dir_image, "u-boot-fitImage-%s" % machine) | ||
1183 | return (fitimage_its_path, fitimage_path) | ||
1184 | |||
1185 | def _get_req_its_paths(self, bb_vars): | ||
1186 | # image nodes | ||
1187 | images = [ 'uboot', 'fdt', ] | ||
1188 | if bb_vars['UBOOT_FIT_TEE'] == "1": | ||
1189 | images.append('tee') | ||
1190 | if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1": | ||
1191 | images.append('atf') | ||
1192 | # if bb_vars['UBOOT_FIT_USER_SETTINGS']: | ||
1193 | |||
1194 | # configuration nodes | ||
1195 | configurations = [ 'conf'] | ||
1196 | |||
1197 | # Create a list of paths for all image and configuration nodes | ||
1198 | req_its_paths = [] | ||
1199 | for image in images: | ||
1200 | req_its_paths.append(['/', 'images', image]) | ||
1201 | if bb_vars['SPL_SIGN_ENABLE'] == "1": | ||
1202 | req_its_paths.append(['/', 'images', image, 'signature']) | ||
1203 | for configuration in configurations: | ||
1204 | req_its_paths.append(['/', 'configurations', configuration]) | ||
1205 | return req_its_paths | ||
1206 | |||
1207 | def _get_req_its_fields(self, bb_vars): | ||
1208 | loadables = ["uboot"] | ||
300 | its_field_check = [ | 1209 | its_field_check = [ |
301 | 'description = "A model description";', | 1210 | 'description = "%s";' % bb_vars['UBOOT_FIT_DESC'], |
1211 | 'description = "U-Boot image";', | ||
1212 | 'data = /incbin/("%s");' % bb_vars['UBOOT_NODTB_BINARY'], | ||
302 | 'type = "standalone";', | 1213 | 'type = "standalone";', |
303 | 'load = <0x80080000>;', | 1214 | 'os = "u-boot";', |
304 | 'entry = <0x80080000>;', | 1215 | 'arch = "%s";' % bb_vars['UBOOT_ARCH'], |
305 | 'default = "conf";', | 1216 | 'compression = "none";', |
306 | 'loadables = "uboot";', | 1217 | 'load = <%s>;' % bb_vars['UBOOT_FIT_UBOOT_LOADADDRESS'], |
307 | 'fdt = "fdt";' | 1218 | 'entry = <%s>;' % bb_vars['UBOOT_FIT_UBOOT_ENTRYPOINT'], |
1219 | 'description = "U-Boot FDT";', | ||
1220 | 'data = /incbin/("%s");' % bb_vars['UBOOT_DTB_BINARY'], | ||
1221 | 'type = "flat_dt";', | ||
1222 | 'arch = "%s";' % bb_vars['UBOOT_ARCH'], | ||
1223 | 'compression = "none";', | ||
1224 | ] | ||
1225 | if bb_vars['UBOOT_FIT_TEE'] == "1": | ||
1226 | its_field_check += [ | ||
1227 | 'description = "Trusted Execution Environment";', | ||
1228 | 'data = /incbin/("%s");' % bb_vars['UBOOT_FIT_TEE_IMAGE'], | ||
1229 | 'type = "tee";', | ||
1230 | 'arch = "%s";' % bb_vars['UBOOT_ARCH'], | ||
1231 | 'os = "tee";', | ||
1232 | 'load = <%s>;' % bb_vars['UBOOT_FIT_TEE_LOADADDRESS'], | ||
1233 | 'entry = <%s>;' % bb_vars['UBOOT_FIT_TEE_ENTRYPOINT'], | ||
1234 | 'compression = "none";', | ||
308 | ] | 1235 | ] |
1236 | loadables.insert(0, "tee") | ||
1237 | if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1": | ||
1238 | its_field_check += [ | ||
1239 | 'description = "ARM Trusted Firmware";', | ||
1240 | 'data = /incbin/("%s");' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'], | ||
1241 | 'type = "firmware";', | ||
1242 | 'arch = "%s";' % bb_vars['UBOOT_ARCH'], | ||
1243 | 'os = "arm-trusted-firmware";', | ||
1244 | 'load = <%s>;' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS'], | ||
1245 | 'entry = <%s>;' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT'], | ||
1246 | 'compression = "none";', | ||
1247 | ] | ||
1248 | loadables.insert(0, "atf") | ||
1249 | its_field_check += [ | ||
1250 | 'default = "conf";', | ||
1251 | 'description = "Boot with signed U-Boot FIT";', | ||
1252 | 'loadables = "%s";' % '", "'.join(loadables), | ||
1253 | 'fdt = "fdt";', | ||
1254 | ] | ||
1255 | return its_field_check | ||
1256 | |||
1257 | def _get_req_sigvalues_config(self, bb_vars): | ||
1258 | # Configurations are not signed by uboot-sign | ||
1259 | return {} | ||
1260 | |||
1261 | def _get_req_sigvalues_image(self, bb_vars): | ||
1262 | if bb_vars['SPL_SIGN_ENABLE'] != "1": | ||
1263 | return {} | ||
1264 | req_sigvalues_image = { | ||
1265 | 'algo': '"%s,%s"' % (bb_vars['UBOOT_FIT_HASH_ALG'], bb_vars['UBOOT_FIT_SIGN_ALG']), | ||
1266 | 'key-name-hint': '"%s"' % bb_vars['SPL_SIGN_KEYNAME'], | ||
1267 | } | ||
1268 | return req_sigvalues_image | ||
309 | 1269 | ||
310 | with open(fitimage_its_path) as its_file: | 1270 | def _get_req_sections(self, bb_vars): |
311 | field_index = 0 | 1271 | """Generate the expected output of dumpimage for beaglebone targets |
312 | for line in its_file: | 1272 | |
313 | if field_index == len(its_field_check): | 1273 | The dict generated by this function is supposed to be compared against |
314 | break | 1274 | the dict which is generated by the _dump_fitimage function. |
315 | if its_field_check[field_index] in line: | 1275 | """ |
316 | field_index +=1 | 1276 | loadables = ['uboot'] |
1277 | req_sections = { | ||
1278 | "uboot": { | ||
1279 | "Type": "Standalone Program", | ||
1280 | "Load Address": bb_vars['UBOOT_FIT_UBOOT_LOADADDRESS'], | ||
1281 | "Entry Point": bb_vars['UBOOT_FIT_UBOOT_ENTRYPOINT'], | ||
1282 | }, | ||
1283 | "fdt": { | ||
1284 | "Type": "Flat Device Tree", | ||
1285 | } | ||
1286 | } | ||
1287 | if bb_vars['UBOOT_FIT_TEE'] == "1": | ||
1288 | loadables.insert(0, "tee") | ||
1289 | req_sections['tee'] = { | ||
1290 | "Type": "Trusted Execution Environment Image", | ||
1291 | # "Load Address": bb_vars['UBOOT_FIT_TEE_LOADADDRESS'], not printed by mkimage? | ||
1292 | # "Entry Point": bb_vars['UBOOT_FIT_TEE_ENTRYPOINT'], not printed by mkimage? | ||
1293 | } | ||
1294 | if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1": | ||
1295 | loadables.insert(0, "atf") | ||
1296 | req_sections['atf'] = { | ||
1297 | "Type": "Firmware", | ||
1298 | "Load Address": bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS'], | ||
1299 | # "Entry Point": bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT'], not printed by mkimage? | ||
1300 | } | ||
1301 | req_sections["conf"] = { | ||
1302 | "Kernel": "unavailable", | ||
1303 | "FDT": "fdt", | ||
1304 | "Loadables": ','.join(loadables), | ||
1305 | } | ||
1306 | |||
1307 | # Add signing related properties if needed | ||
1308 | uboot_fit_hash_alg = bb_vars['UBOOT_FIT_HASH_ALG'] | ||
1309 | uboot_fit_sign_alg = bb_vars['UBOOT_FIT_SIGN_ALG'] | ||
1310 | spl_sign_enable = bb_vars['SPL_SIGN_ENABLE'] | ||
1311 | spl_sign_keyname = bb_vars['SPL_SIGN_KEYNAME'] | ||
1312 | num_signatures = 0 | ||
1313 | if spl_sign_enable == "1": | ||
1314 | for section in req_sections: | ||
1315 | if not section.startswith('conf'): | ||
1316 | req_sections[section]['Sign algo'] = "%s,%s:%s" % \ | ||
1317 | (uboot_fit_hash_alg, uboot_fit_sign_alg, spl_sign_keyname) | ||
1318 | num_signatures += 1 | ||
1319 | return (req_sections, num_signatures) | ||
1320 | |||
1321 | def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path): | ||
1322 | if bb_vars['UBOOT_FITIMAGE_ENABLE'] == '1' and bb_vars['SPL_SIGN_ENABLE'] == "1": | ||
1323 | self.logger.debug("Verifying signatures in the FIT image") | ||
1324 | else: | ||
1325 | self.logger.debug("FIT image is not signed. Signature verification is not needed.") | ||
1326 | return | ||
1327 | |||
1328 | uboot_fit_hash_alg = bb_vars['UBOOT_FIT_HASH_ALG'] | ||
1329 | uboot_fit_sign_alg = bb_vars['UBOOT_FIT_SIGN_ALG'] | ||
1330 | spl_sign_keyname = bb_vars['SPL_SIGN_KEYNAME'] | ||
1331 | fit_sign_alg_len = FitImageTestCase.MKIMAGE_SIGNATURE_LENGTHS[uboot_fit_sign_alg] | ||
1332 | for section, values in sections.items(): | ||
1333 | # Configuration nodes are always signed with UBOOT_SIGN_KEYNAME (if UBOOT_SIGN_ENABLE = "1") | ||
1334 | if section.startswith("conf"): | ||
1335 | # uboot-sign does not sign configuration nodes | ||
1336 | pass | ||
1337 | else: | ||
1338 | # uboot-sign does not add hash nodes, only image signatures | ||
1339 | sign_algo = values.get('Sign algo', None) | ||
1340 | req_sign_algo = "%s,%s:%s" % (uboot_fit_hash_alg, uboot_fit_sign_alg, spl_sign_keyname) | ||
1341 | self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section) | ||
1342 | sign_value = values.get('Sign value', None) | ||
1343 | self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section) | ||
1344 | |||
1345 | # Search for the string passed to mkimage in each signed section of the FIT image. | ||
1346 | # mkimage appears to support adding a comment but not reading it back. | ||
1347 | a_comment = FitImageTestCase._get_uboot_mkimage_sign_args(bb_vars['SPL_MKIMAGE_SIGN_ARGS']) | ||
1348 | self.logger.debug("a_comment: %s" % a_comment) | ||
1349 | if a_comment: | ||
1350 | found_comments = FitImageTestCase._find_string_in_bin_file(fitimage_path, a_comment) | ||
1351 | self.assertEqual(found_comments, num_signatures, "Expected %d signed and commented (%s) sections in the fitImage." % | ||
1352 | (num_signatures, a_comment)) | ||
1353 | |||
1354 | def _check_kernel_dtb(self, bb_vars): | ||
1355 | """ | ||
1356 | Check if the device-tree from U-Boot has the kernel public key(s). | ||
1357 | |||
1358 | The concat_dtb function of the uboot-sign.bbclass injects the public keys | ||
1359 | which are required for verifying the kernel at run-time into the DTB from | ||
1360 | U-Boot. The following example is from a build with FIT_SIGN_INDIVIDUAL | ||
1361 | set to "1". If it is set to "0" the key-the-kernel-image-key node is not | ||
1362 | present. | ||
1363 | / { | ||
1364 | ... | ||
1365 | signature { | ||
1366 | key-the-kernel-image-key { | ||
1367 | required = "image"; | ||
1368 | algo = "sha256,rsa2048"; | ||
1369 | ... | ||
1370 | }; | ||
1371 | key-the-kernel-config-key { | ||
1372 | required = "conf"; | ||
1373 | algo = "sha256,rsa2048"; | ||
1374 | ... | ||
1375 | }; | ||
1376 | }; | ||
1377 | """ | ||
1378 | # Set up dtc-native, which provides fdtget | ||
1379 | dtc_bindir = FitImageTestCase._setup_native('dtc-native') | ||
1380 | |||
1381 | # Check if 1 or 2 signature sections are in the DTB. | ||
1382 | uboot_dtb_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['UBOOT_DTB_IMAGE']) | ||
1383 | algo = "%s,%s" % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']) | ||
1384 | if bb_vars['FIT_SIGN_INDIVIDUAL'] == "1": | ||
1385 | uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME'] | ||
1386 | key_dtb_path = "/signature/key-" + uboot_sign_img_keyname | ||
1387 | self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "required", "image") | ||
1388 | self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "algo", algo) | ||
1389 | self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "key-name-hint", uboot_sign_img_keyname) | ||
1390 | |||
1391 | uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME'] | ||
1392 | key_dtb_path = "/signature/key-" + uboot_sign_keyname | ||
1393 | self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "required", "conf") | ||
1394 | self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "algo", algo) | ||
1395 | self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "key-name-hint", uboot_sign_keyname) | ||
317 | 1396 | ||
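_verify_dtb_property is also defined outside this hunk; it plausibly shells out to fdtget from dtc-native and compares the printed value, e.g. fdtget u-boot.dtb /signature/key-dev required printing conf. A sketch under that assumption:

    import os
    import subprocess

    def verify_dtb_property_sketch(dtc_bindir, dtb_path, node_path, prop, req_value):
        # Read one DTB property as a string via fdtget and compare it.
        # Hypothetical stand-in for the suite's _verify_dtb_property helper.
        fdtget = os.path.join(dtc_bindir, 'fdtget')
        value = subprocess.check_output(
            [fdtget, '-t', 's', dtb_path, node_path, prop],
            universal_newlines=True).strip()
        assert value == req_value, '%s %s: got %r, expected %r' % (
            node_path, prop, value, req_value)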
318 | if field_index != len(its_field_check): # if its equal, the test passed | ||
319 | self.assertTrue(field_index == len(its_field_check), | ||
320 | "Fields in Image Tree Source File %s did not match, error in finding %s" | ||
321 | % (fitimage_its_path, its_field_check[field_index])) | ||
322 | 1397 | ||
323 | def test_uboot_sign_fit_image(self): | 1398 | def test_uboot_fit_image(self): |
324 | """ | 1399 | """ |
325 | Summary: Check if Uboot FIT image and Image Tree Source | 1400 | Summary: Check if Uboot FIT image and Image Tree Source |
326 | (its) are built and the Image Tree Source has the | 1401 | (its) are built and the Image Tree Source has the |
327 | correct fields, in the scenario where the Kernel | 1402 | correct fields. |
328 | is also creating/signing its fitImage. | ||
329 | Expected: 1. u-boot-fitImage and u-boot-its can be built | 1403 | Expected: 1. u-boot-fitImage and u-boot-its can be built |
330 | 2. The type, load address, entrypoint address and | 1404 | 2. The type, load address, entrypoint address and |
331 | default values of U-boot image are correct in the | 1405 | default values of U-boot image are correct in the |
@@ -349,61 +1423,10 @@ UBOOT_FITIMAGE_ENABLE = "1" | |||
349 | UBOOT_LOADADDRESS = "0x80080000" | 1423 | UBOOT_LOADADDRESS = "0x80080000" |
350 | UBOOT_ENTRYPOINT = "0x80080000" | 1424 | UBOOT_ENTRYPOINT = "0x80080000" |
351 | UBOOT_FIT_DESC = "A model description" | 1425 | UBOOT_FIT_DESC = "A model description" |
352 | KERNEL_IMAGETYPES += " fitImage " | ||
353 | KERNEL_CLASSES = " kernel-fitimage " | ||
354 | INHERIT += "test-mkimage-wrapper" | ||
355 | UBOOT_SIGN_ENABLE = "1" | ||
356 | FIT_GENERATE_KEYS = "1" | ||
357 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
358 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" | ||
359 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | ||
360 | FIT_SIGN_INDIVIDUAL = "1" | ||
361 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'" | ||
362 | """ | 1426 | """ |
363 | self.write_config(config) | 1427 | self.write_config(config) |
364 | 1428 | bb_vars = self._fit_get_bb_vars() | |
365 | # The U-Boot fitImage is created as part of the U-Boot recipe | 1429 | self._test_fitimage(bb_vars) |
366 | bitbake("virtual/bootloader") | ||
367 | |||
368 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
369 | machine = get_bb_var('MACHINE') | ||
370 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
371 | "u-boot-its-%s" % (machine,)) | ||
372 | fitimage_path = os.path.join(deploy_dir_image, | ||
373 | "u-boot-fitImage-%s" % (machine,)) | ||
374 | |||
375 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
376 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
377 | self.assertTrue(os.path.exists(fitimage_path), | ||
378 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
379 | |||
380 | # Check that the type, load address, entrypoint address and default | ||
381 | # values for kernel and ramdisk in Image Tree Source are as expected. | ||
382 | # The order of fields in the below array is important. Not all the | ||
383 | # fields are tested, only the key fields that wont vary between | ||
384 | # different architectures. | ||
385 | its_field_check = [ | ||
386 | 'description = "A model description";', | ||
387 | 'type = "standalone";', | ||
388 | 'load = <0x80080000>;', | ||
389 | 'entry = <0x80080000>;', | ||
390 | 'default = "conf";', | ||
391 | 'loadables = "uboot";', | ||
392 | 'fdt = "fdt";' | ||
393 | ] | ||
394 | |||
395 | with open(fitimage_its_path) as its_file: | ||
396 | field_index = 0 | ||
397 | for line in its_file: | ||
398 | if field_index == len(its_field_check): | ||
399 | break | ||
400 | if its_field_check[field_index] in line: | ||
401 | field_index +=1 | ||
402 | |||
403 | if field_index != len(its_field_check): # if its equal, the test passed | ||
404 | self.assertTrue(field_index == len(its_field_check), | ||
405 | "Fields in Image Tree Source File %s did not match, error in finding %s" | ||
406 | % (fitimage_its_path, its_field_check[field_index])) | ||
407 | 1430 | ||
408 | 1431 | ||
409 | def test_sign_standalone_uboot_fit_image(self): | 1432 | def test_sign_standalone_uboot_fit_image(self): |
@@ -426,15 +1449,11 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'" | |||
426 | Usama Arif <usama.arif@arm.com> | 1449 | Usama Arif <usama.arif@arm.com> |
427 | """ | 1450 | """ |
428 | config = """ | 1451 | config = """ |
429 | # There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at | 1452 | # There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at |
430 | # least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | 1453 | # least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set |
431 | MACHINE = "qemuarm" | 1454 | MACHINE = "qemuarm" |
432 | UBOOT_MACHINE = "am57xx_evm_defconfig" | 1455 | UBOOT_MACHINE = "am57xx_evm_defconfig" |
433 | SPL_BINARY = "MLO" | 1456 | SPL_BINARY = "MLO" |
434 | # The kernel-fitimage class is a dependency even if we're only | ||
435 | # creating/signing the U-Boot fitImage | ||
436 | KERNEL_CLASSES = " kernel-fitimage" | ||
437 | INHERIT += "test-mkimage-wrapper" | ||
438 | # Enable creation and signing of the U-Boot fitImage | 1457 | # Enable creation and signing of the U-Boot fitImage |
439 | UBOOT_FITIMAGE_ENABLE = "1" | 1458 | UBOOT_FITIMAGE_ENABLE = "1" |
440 | SPL_SIGN_ENABLE = "1" | 1459 | SPL_SIGN_ENABLE = "1" |
@@ -452,106 +1471,9 @@ UBOOT_FIT_GENERATE_KEYS = "1" | |||
452 | UBOOT_FIT_HASH_ALG = "sha256" | 1471 | UBOOT_FIT_HASH_ALG = "sha256" |
453 | """ | 1472 | """ |
454 | self.write_config(config) | 1473 | self.write_config(config) |
1474 | bb_vars = self._fit_get_bb_vars() | ||
1475 | self._test_fitimage(bb_vars) | ||
455 | 1476 | ||
456 | # The U-Boot fitImage is created as part of the U-Boot recipe | ||
457 | bitbake("virtual/bootloader") | ||
458 | |||
459 | image_type = "core-image-minimal" | ||
460 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
461 | machine = get_bb_var('MACHINE') | ||
462 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
463 | "u-boot-its-%s" % (machine,)) | ||
464 | fitimage_path = os.path.join(deploy_dir_image, | ||
465 | "u-boot-fitImage-%s" % (machine,)) | ||
466 | |||
467 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
468 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
469 | self.assertTrue(os.path.exists(fitimage_path), | ||
470 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
471 | |||
472 | req_itspaths = [ | ||
473 | ['/', 'images', 'uboot'], | ||
474 | ['/', 'images', 'uboot', 'signature'], | ||
475 | ['/', 'images', 'fdt'], | ||
476 | ['/', 'images', 'fdt', 'signature'], | ||
477 | ] | ||
478 | |||
479 | itspath = [] | ||
480 | itspaths = [] | ||
481 | linect = 0 | ||
482 | sigs = {} | ||
483 | with open(fitimage_its_path) as its_file: | ||
484 | linect += 1 | ||
485 | for line in its_file: | ||
486 | line = line.strip() | ||
487 | if line.endswith('};'): | ||
488 | itspath.pop() | ||
489 | elif line.endswith('{'): | ||
490 | itspath.append(line[:-1].strip()) | ||
491 | itspaths.append(itspath[:]) | ||
492 | elif itspath and itspath[-1] == 'signature': | ||
493 | itsdotpath = '.'.join(itspath) | ||
494 | if not itsdotpath in sigs: | ||
495 | sigs[itsdotpath] = {} | ||
496 | if not '=' in line or not line.endswith(';'): | ||
497 | self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line)) | ||
498 | key, value = line.split('=', 1) | ||
499 | sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';') | ||
500 | |||
501 | for reqpath in req_itspaths: | ||
502 | if not reqpath in itspaths: | ||
503 | self.fail('Missing section in its file: %s' % reqpath) | ||
504 | |||
505 | reqsigvalues_image = { | ||
506 | 'algo': '"sha256,rsa2048"', | ||
507 | 'key-name-hint': '"spl-oe-selftest"', | ||
508 | } | ||
509 | |||
510 | for itspath, values in sigs.items(): | ||
511 | reqsigvalues = reqsigvalues_image | ||
512 | for reqkey, reqvalue in reqsigvalues.items(): | ||
513 | value = values.get(reqkey, None) | ||
514 | if value is None: | ||
515 | self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath)) | ||
516 | self.assertEqual(value, reqvalue) | ||
517 | |||
518 | # Dump the image to see if it really got signed | ||
519 | bitbake("u-boot-tools-native -c addto_recipe_sysroot") | ||
520 | result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') | ||
521 | recipe_sysroot_native = result.output.split('=')[1].strip('"') | ||
522 | dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage') | ||
523 | result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) | ||
524 | in_signed = None | ||
525 | signed_sections = {} | ||
526 | for line in result.output.splitlines(): | ||
527 | if line.startswith((' Image')): | ||
528 | in_signed = re.search(r'\((.*)\)', line).groups()[0] | ||
529 | elif re.match(' \w', line): | ||
530 | in_signed = None | ||
531 | elif in_signed: | ||
532 | if not in_signed in signed_sections: | ||
533 | signed_sections[in_signed] = {} | ||
534 | key, value = line.split(':', 1) | ||
535 | signed_sections[in_signed][key.strip()] = value.strip() | ||
536 | self.assertIn('uboot', signed_sections) | ||
537 | self.assertIn('fdt', signed_sections) | ||
538 | for signed_section, values in signed_sections.items(): | ||
539 | value = values.get('Sign algo', None) | ||
540 | self.assertEqual(value, 'sha256,rsa2048:spl-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
541 | value = values.get('Sign value', None) | ||
542 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) | ||
543 | |||
544 | # Check for SPL_MKIMAGE_SIGN_ARGS | ||
545 | result = runCmd('bitbake -e virtual/bootloader | grep ^T=') | ||
546 | tempdir = result.output.split('=', 1)[1].strip().strip('"') | ||
547 | result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
548 | self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used') | ||
549 | |||
550 | # Check for evidence of test-mkimage-wrapper class | ||
551 | result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
552 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') | ||
553 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
554 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') | ||
555 | 1477 | ||
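The brace-tracking ITS scan deleted above is not lost; the shared helpers perform the same walk. Distilled into a standalone function (a sketch restating the removed logic, not the new helper verbatim):

    def parse_its_signatures(its_path):
        # Track the current node path: a line ending in '{' opens a node,
        # a line ending in '};' closes one. Collect 'key = value;' pairs
        # that appear inside any 'signature' node.
        path, sigs = [], {}
        with open(its_path) as its_file:
            for line in its_file:
                line = line.strip()
                if line.endswith('};'):
                    path.pop()
                elif line.endswith('{'):
                    path.append(line[:-1].strip())
                elif path and path[-1] == 'signature' and '=' in line:
                    key, value = line.split('=', 1)
                    node = '.'.join(path)
                    sigs.setdefault(node, {})[key.rstrip()] = \
                        value.strip().rstrip(';')
        return sigs

The caller then asserts that the uboot and fdt signature nodes carry the expected algo and key-name-hint values.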
556 | def test_sign_cascaded_uboot_fit_image(self): | 1478 | def test_sign_cascaded_uboot_fit_image(self): |
557 | """ | 1479 | """ |
@@ -565,9 +1487,9 @@ UBOOT_FIT_HASH_ALG = "sha256" | |||
565 | via UBOOT_FIT_GENERATE_KEYS) | 1487 | via UBOOT_FIT_GENERATE_KEYS) |
566 | 3) Dumping the FIT image indicates signature values | 1488 | 3) Dumping the FIT image indicates signature values |
567 | are present | 1489 | are present |
568 | 4) Examination of the do_uboot_assemble_fitimage | 1490 | 4) Examination of the do_uboot_assemble_fitimage |
569 | runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN | 1491 | shows that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and |
570 | and SPL_MKIMAGE_SIGN_ARGS are working as expected. | 1492 | SPL_MKIMAGE_SIGN_ARGS are working as expected. |
571 | Product: oe-core | 1493 | Product: oe-core |
572 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon | 1494 | Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon |
573 | work by Paul Eggleton <paul.eggleton@microsoft.com> and | 1495 | work by Paul Eggleton <paul.eggleton@microsoft.com> and |
@@ -588,7 +1510,7 @@ UBOOT_DTB_BINARY = "u-boot.dtb" | |||
588 | UBOOT_ENTRYPOINT = "0x80000000" | 1510 | UBOOT_ENTRYPOINT = "0x80000000" |
589 | UBOOT_LOADADDRESS = "0x80000000" | 1511 | UBOOT_LOADADDRESS = "0x80000000" |
590 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | 1512 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" |
591 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'" | 1513 | UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'" |
592 | UBOOT_DTB_LOADADDRESS = "0x82000000" | 1514 | UBOOT_DTB_LOADADDRESS = "0x82000000" |
593 | UBOOT_ARCH = "arm" | 1515 | UBOOT_ARCH = "arm" |
594 | SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | 1516 | SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" |
@@ -596,251 +1518,214 @@ SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'" | |||
596 | UBOOT_EXTLINUX = "0" | 1518 | UBOOT_EXTLINUX = "0" |
597 | UBOOT_FIT_GENERATE_KEYS = "1" | 1519 | UBOOT_FIT_GENERATE_KEYS = "1" |
598 | UBOOT_FIT_HASH_ALG = "sha256" | 1520 | UBOOT_FIT_HASH_ALG = "sha256" |
599 | KERNEL_IMAGETYPES += " fitImage " | ||
600 | KERNEL_CLASSES = " kernel-fitimage " | ||
601 | INHERIT += "test-mkimage-wrapper" | ||
602 | UBOOT_SIGN_ENABLE = "1" | 1521 | UBOOT_SIGN_ENABLE = "1" |
603 | FIT_GENERATE_KEYS = "1" | ||
604 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | 1522 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" |
605 | UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" | ||
606 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" | 1523 | UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" |
607 | FIT_SIGN_INDIVIDUAL = "1" | ||
608 | """ | 1524 | """ |
609 | self.write_config(config) | 1525 | self.write_config(config) |
1526 | bb_vars = self._fit_get_bb_vars() | ||
610 | 1527 | ||
611 | # The U-Boot fitImage is created as part of the U-Boot recipe | 1528 | self._gen_signing_key(bb_vars) |
612 | bitbake("virtual/bootloader") | 1529 | self._test_fitimage(bb_vars) |
613 | 1530 | self._check_kernel_dtb(bb_vars) | |
614 | image_type = "core-image-minimal" | ||
615 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | ||
616 | machine = get_bb_var('MACHINE') | ||
617 | fitimage_its_path = os.path.join(deploy_dir_image, | ||
618 | "u-boot-its-%s" % (machine,)) | ||
619 | fitimage_path = os.path.join(deploy_dir_image, | ||
620 | "u-boot-fitImage-%s" % (machine,)) | ||
621 | |||
622 | self.assertTrue(os.path.exists(fitimage_its_path), | ||
623 | "%s image tree source doesn't exist" % (fitimage_its_path)) | ||
624 | self.assertTrue(os.path.exists(fitimage_path), | ||
625 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
626 | |||
627 | req_itspaths = [ | ||
628 | ['/', 'images', 'uboot'], | ||
629 | ['/', 'images', 'uboot', 'signature'], | ||
630 | ['/', 'images', 'fdt'], | ||
631 | ['/', 'images', 'fdt', 'signature'], | ||
632 | ] | ||
633 | 1531 | ||
634 | itspath = [] | 1532 | def test_uboot_atf_tee_fit_image(self): |
635 | itspaths = [] | 1533 | """ |
636 | linect = 0 | 1534 | Summary: Check if U-boot FIT image and Image Tree Source |
637 | sigs = {} | 1535 | (its) are built and the Image Tree Source has the |
638 | with open(fitimage_its_path) as its_file: | 1536 | correct fields. |
639 | linect += 1 | 1537 | Expected: 1. Create atf and tee dummy images |
640 | for line in its_file: | 1538 | 2. Both u-boot-fitImage and u-boot-its can be built |
641 | line = line.strip() | 1539 | 3. The os, load address, entrypoint address and |
642 | if line.endswith('};'): | 1540 | default values of U-boot, ATF and TEE images are |
643 | itspath.pop() | 1541 | correct in the Image Tree Source. Not all the |
644 | elif line.endswith('{'): | 1542 | fields are tested, only the key fields that wont |
645 | itspath.append(line[:-1].strip()) | 1543 | vary between different architectures. |
646 | itspaths.append(itspath[:]) | 1544 | Product: oe-core |
647 | elif itspath and itspath[-1] == 'signature': | 1545 | Author: Jamin Lin <jamin_lin@aspeedtech.com> |
648 | itsdotpath = '.'.join(itspath) | 1546 | """ |
649 | if not itsdotpath in sigs: | 1547 | config = """ |
650 | sigs[itsdotpath] = {} | 1548 | # We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set |
651 | if not '=' in line or not line.endswith(';'): | 1549 | MACHINE = "qemuarm" |
652 | self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line)) | 1550 | UBOOT_MACHINE = "am57xx_evm_defconfig" |
653 | key, value = line.split('=', 1) | 1551 | SPL_BINARY = "MLO" |
654 | sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';') | ||
655 | 1552 | ||
656 | for reqpath in req_itspaths: | 1553 | # Enable creation of the U-Boot fitImage |
657 | if not reqpath in itspaths: | 1554 | UBOOT_FITIMAGE_ENABLE = "1" |
658 | self.fail('Missing section in its file: %s' % reqpath) | ||
659 | 1555 | ||
660 | reqsigvalues_image = { | 1556 | # (U-boot) fitImage properties |
661 | 'algo': '"sha256,rsa2048"', | 1557 | UBOOT_LOADADDRESS = "0x80080000" |
662 | 'key-name-hint': '"spl-cascaded-oe-selftest"', | 1558 | UBOOT_ENTRYPOINT = "0x80080000" |
663 | } | 1559 | UBOOT_FIT_DESC = "A model description" |
664 | 1560 | ||
665 | for itspath, values in sigs.items(): | 1561 | # Enable creation of the TEE fitImage |
666 | reqsigvalues = reqsigvalues_image | 1562 | UBOOT_FIT_TEE = "1" |
667 | for reqkey, reqvalue in reqsigvalues.items(): | ||
668 | value = values.get(reqkey, None) | ||
669 | if value is None: | ||
670 | self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath)) | ||
671 | self.assertEqual(value, reqvalue) | ||
672 | |||
673 | # Dump the image to see if it really got signed | ||
674 | bitbake("u-boot-tools-native -c addto_recipe_sysroot") | ||
675 | result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') | ||
676 | recipe_sysroot_native = result.output.split('=')[1].strip('"') | ||
677 | dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage') | ||
678 | result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) | ||
679 | in_signed = None | ||
680 | signed_sections = {} | ||
681 | for line in result.output.splitlines(): | ||
682 | if line.startswith((' Image')): | ||
683 | in_signed = re.search(r'\((.*)\)', line).groups()[0] | ||
684 | elif re.match(' \w', line): | ||
685 | in_signed = None | ||
686 | elif in_signed: | ||
687 | if not in_signed in signed_sections: | ||
688 | signed_sections[in_signed] = {} | ||
689 | key, value = line.split(':', 1) | ||
690 | signed_sections[in_signed][key.strip()] = value.strip() | ||
691 | self.assertIn('uboot', signed_sections) | ||
692 | self.assertIn('fdt', signed_sections) | ||
693 | for signed_section, values in signed_sections.items(): | ||
694 | value = values.get('Sign algo', None) | ||
695 | self.assertEqual(value, 'sha256,rsa2048:spl-cascaded-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) | ||
696 | value = values.get('Sign value', None) | ||
697 | self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) | ||
698 | |||
699 | # Check for SPL_MKIMAGE_SIGN_ARGS | ||
700 | result = runCmd('bitbake -e virtual/bootloader | grep ^T=') | ||
701 | tempdir = result.output.split('=', 1)[1].strip().strip('"') | ||
702 | result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
703 | self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used') | ||
704 | |||
705 | # Check for evidence of test-mkimage-wrapper class | ||
706 | result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
707 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') | ||
708 | result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) | ||
709 | self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') | ||
710 | |||
711 | |||
712 | |||
713 | def test_initramfs_bundle(self): | ||
714 | """ | ||
715 | Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) | ||
716 | The FIT settings are set by the test case. | ||
717 | The machine used is beaglebone-yocto. | ||
718 | Expected: 1. The ITS is generated with initramfs bundle support | ||
719 | 2. All the fields in the kernel node are as expected (matching the | ||
720 | conf settings) | ||
721 | 3. The kernel is included in all the available configurations and | ||
722 | its hash is included in the configuration signature | ||
723 | 1563 | ||
724 | Product: oe-core | 1564 | # TEE fitImage properties |
725 | Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com> | 1565 | UBOOT_FIT_TEE_IMAGE = "${TOPDIR}/tee-dummy.bin" |
726 | """ | 1566 | UBOOT_FIT_TEE_LOADADDRESS = "0x80180000" |
1567 | UBOOT_FIT_TEE_ENTRYPOINT = "0x80180000" | ||
1568 | |||
1569 | # Enable creation of the ATF fitImage | ||
1570 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE = "1" | ||
1571 | |||
1572 | # ATF fitImage properties | ||
1573 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE = "${TOPDIR}/atf-dummy.bin" | ||
1574 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS = "0x80280000" | ||
1575 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT = "0x80280000" | ||
1576 | """ | ||
1577 | self.write_config(config) | ||
1578 | |||
1579 | bb_vars = self._fit_get_bb_vars([ | ||
1580 | 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE', | ||
1581 | 'UBOOT_FIT_TEE_IMAGE', | ||
1582 | ]) | ||
1583 | |||
1584 | # Create an ATF dummy image | ||
1585 | dummy_atf = os.path.join(self.builddir, bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE']) | ||
1586 | FitImageTestCase._gen_random_file(dummy_atf) | ||
727 | 1587 | ||
1588 | # Create a TEE dummy image | ||
1589 | dummy_tee = os.path.join(self.builddir, bb_vars['UBOOT_FIT_TEE_IMAGE']) | ||
1590 | FitImageTestCase._gen_random_file(dummy_tee) | ||
1591 | |||
1592 | self._test_fitimage(bb_vars) | ||
1593 | |||
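FitImageTestCase._gen_random_file is defined earlier in the file and not visible in this hunk. From its use here it only needs to drop a unique, non-empty payload at the given path; a plausible stand-in (an assumption, not the upstream implementation):

    import os

    def _gen_random_file(file_path, num_bytes=65536):
        # Fill file_path with random data so the dummy ATF/TEE payloads
        # are non-empty and differ between builds.
        with open(file_path, 'wb') as f:
            f.write(os.urandom(num_bytes))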
1594 | def test_sign_standalone_uboot_atf_tee_fit_image(self): | ||
1595 | """ | ||
1596 | Summary: Check if U-Boot FIT image and Image Tree Source (its) are | ||
1597 | created and signed correctly for the scenario where only | ||
1598 | the U-Boot proper fitImage is being created and signed. | ||
1599 | Expected: 1. Create atf and tee dummy images | ||
1600 | 2. U-Boot its and FIT image are built successfully | ||
1601 | 3. Scanning the its file indicates signing is enabled | ||
1602 | as requested by SPL_SIGN_ENABLE (using keys generated | ||
1603 | via UBOOT_FIT_GENERATE_KEYS) | ||
1604 | 4. Dumping the FIT image indicates signature values | ||
1605 | are present | ||
1606 | 5. Examination of the do_uboot_assemble_fitimage | ||
1607 | runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN | ||
1608 | and SPL_MKIMAGE_SIGN_ARGS are working as expected. | ||
1609 | Product: oe-core | ||
1610 | Author: Jamin Lin <jamin_lin@aspeedtech.com> | ||
1611 | """ | ||
728 | config = """ | 1612 | config = """ |
729 | DISTRO="poky" | 1613 | # There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at |
730 | MACHINE = "beaglebone-yocto" | 1614 | # least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set |
731 | INITRAMFS_IMAGE_BUNDLE = "1" | 1615 | MACHINE = "qemuarm" |
732 | INITRAMFS_IMAGE = "core-image-minimal-initramfs" | 1616 | UBOOT_MACHINE = "am57xx_evm_defconfig" |
733 | INITRAMFS_SCRIPTS = "" | 1617 | SPL_BINARY = "MLO" |
734 | UBOOT_MACHINE = "am335x_evm_defconfig" | 1618 | # Enable creation and signing of the U-Boot fitImage |
735 | KERNEL_CLASSES = " kernel-fitimage " | 1619 | UBOOT_FITIMAGE_ENABLE = "1" |
736 | KERNEL_IMAGETYPES = "fitImage" | 1620 | SPL_SIGN_ENABLE = "1" |
737 | UBOOT_SIGN_ENABLE = "1" | 1621 | SPL_SIGN_KEYNAME = "spl-oe-selftest" |
738 | UBOOT_SIGN_KEYNAME = "beaglebonekey" | 1622 | SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys" |
739 | UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}" | ||
740 | UBOOT_DTB_BINARY = "u-boot.dtb" | 1623 | UBOOT_DTB_BINARY = "u-boot.dtb" |
741 | UBOOT_ENTRYPOINT = "0x80000000" | 1624 | UBOOT_ENTRYPOINT = "0x80000000" |
742 | UBOOT_LOADADDRESS = "0x80000000" | 1625 | UBOOT_LOADADDRESS = "0x80000000" |
743 | UBOOT_DTB_LOADADDRESS = "0x82000000" | ||
744 | UBOOT_ARCH = "arm" | 1626 | UBOOT_ARCH = "arm" |
745 | UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" | 1627 | SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" |
746 | UBOOT_MKIMAGE_KERNEL_TYPE = "kernel" | 1628 | SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot ATF TEE comment'" |
747 | UBOOT_EXTLINUX = "0" | 1629 | UBOOT_EXTLINUX = "0" |
748 | FIT_GENERATE_KEYS = "1" | 1630 | UBOOT_FIT_GENERATE_KEYS = "1" |
749 | KERNEL_IMAGETYPE_REPLACEMENT = "zImage" | 1631 | UBOOT_FIT_HASH_ALG = "sha256" |
750 | FIT_KERNEL_COMP_ALG = "none" | ||
751 | FIT_HASH_ALG = "sha256" | ||
752 | """ | ||
753 | self.write_config(config) | ||
754 | 1632 | ||
755 | # fitImage is created as part of linux recipe | 1633 | # Enable creation of the TEE fitImage |
756 | bitbake("virtual/kernel") | 1634 | UBOOT_FIT_TEE = "1" |
757 | 1635 | ||
758 | image_type = get_bb_var('INITRAMFS_IMAGE') | 1636 | # TEE fitImage properties |
759 | deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') | 1637 | UBOOT_FIT_TEE_IMAGE = "${TOPDIR}/tee-dummy.bin" |
760 | machine = get_bb_var('MACHINE') | 1638 | UBOOT_FIT_TEE_LOADADDRESS = "0x80180000" |
761 | fitimage_its_path = os.path.join(deploy_dir_image, | 1639 | UBOOT_FIT_TEE_ENTRYPOINT = "0x80180000" |
762 | "fitImage-its-%s-%s-%s" % (image_type, machine, machine)) | ||
763 | fitimage_path = os.path.join(deploy_dir_image,"fitImage") | ||
764 | 1640 | ||
765 | self.assertTrue(os.path.exists(fitimage_its_path), | 1641 | # Enable creation of the ATF fitImage |
766 | "%s image tree source doesn't exist" % (fitimage_its_path)) | 1642 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE = "1" |
767 | self.assertTrue(os.path.exists(fitimage_path), | ||
768 | "%s FIT image doesn't exist" % (fitimage_path)) | ||
769 | 1643 | ||
770 | kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) | 1644 | # ATF fitImage properties |
771 | kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) | 1645 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE = "${TOPDIR}/atf-dummy.bin" |
772 | kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE')) | 1646 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS = "0x80280000" |
773 | kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG')) | 1647 | UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT = "0x80280000" |
774 | uboot_arch = str(get_bb_var('UBOOT_ARCH')) | 1648 | """ |
775 | fit_hash_alg = str(get_bb_var('FIT_HASH_ALG')) | 1649 | self.write_config(config) |
776 | 1650 | ||
777 | its_file = open(fitimage_its_path) | 1651 | bb_vars = self._fit_get_bb_vars([ |
1652 | 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE', | ||
1653 | 'UBOOT_FIT_TEE_IMAGE', | ||
1654 | ]) | ||
778 | 1655 | ||
779 | its_lines = [line.strip() for line in its_file.readlines()] | 1656 | # Create an ATF dummy image |
1657 | dummy_atf = os.path.join(self.builddir, bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE']) | ||
1658 | FitImageTestCase._gen_random_file(dummy_atf) | ||
780 | 1659 | ||
781 | exp_node_lines = [ | 1660 | # Create a TEE dummy image |
782 | 'kernel-1 {', | 1661 | dummy_tee = os.path.join(self.builddir, bb_vars['UBOOT_FIT_TEE_IMAGE']) |
783 | 'description = "Linux kernel";', | 1662 | FitImageTestCase._gen_random_file(dummy_tee) |
784 | 'data = /incbin/("linux.bin");', | ||
785 | 'type = "' + kernel_type + '";', | ||
786 | 'arch = "' + uboot_arch + '";', | ||
787 | 'os = "linux";', | ||
788 | 'compression = "' + kernel_compression + '";', | ||
789 | 'load = <' + kernel_load + '>;', | ||
790 | 'entry = <' + kernel_entry + '>;', | ||
791 | 'hash-1 {', | ||
792 | 'algo = "' + fit_hash_alg +'";', | ||
793 | '};', | ||
794 | '};' | ||
795 | ] | ||
796 | 1663 | ||
797 | node_str = exp_node_lines[0] | 1664 | self._test_fitimage(bb_vars) |
798 | 1665 | ||
799 | test_passed = False | ||
800 | 1666 | ||
801 | print ("checking kernel node\n") | 1667 | def test_sign_uboot_kernel_individual(self): |
1668 | """ | ||
1669 | Summary: Check if the device-tree from U-Boot has two public keys | ||
1670 | for verifying the kernel FIT image created by the | ||
1671 | kernel-fitimage.bbclass included. | ||
1672 | This test sets: FIT_SIGN_INDIVIDUAL = "1" | ||
1673 | Expected: There must be two signature nodes. One is required for | ||
1674 | the individual image nodes, the other is required for the | ||
1675 | verification of the configuration section. | ||
1676 | """ | ||
1677 | config = """ | ||
1678 | # Enable creation of fitImage | ||
1679 | MACHINE = "beaglebone-yocto" | ||
1680 | UBOOT_SIGN_ENABLE = "1" | ||
1681 | UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
1682 | UBOOT_SIGN_KEYNAME = "the-kernel-config-key" | ||
1683 | UBOOT_SIGN_IMG_KEYNAME = "the-kernel-image-key" | ||
1684 | UBOOT_MKIMAGE_DTCOPTS="-I dts -O dtb -p 2000" | ||
1685 | FIT_SIGN_INDIVIDUAL = "1" | ||
1686 | """ | ||
1687 | self.write_config(config) | ||
1688 | bb_vars = self._fit_get_bb_vars() | ||
1689 | self._gen_signing_key(bb_vars) | ||
802 | 1690 | ||
803 | if node_str in its_lines: | 1691 | bitbake(UBootFitImageTests.BOOTLOADER_RECIPE) |
804 | node_start_idx = its_lines.index(node_str) | ||
805 | node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))] | ||
806 | if node == exp_node_lines: | ||
807 | print("kernel node verified") | ||
808 | else: | ||
809 | self.assertTrue(test_passed == True,"kernel node does not match expectation") | ||
810 | |||
811 | rx_configs = re.compile("^conf-.*") | ||
812 | its_configs = list(filter(rx_configs.match, its_lines)) | ||
813 | |||
814 | for cfg_str in its_configs: | ||
815 | cfg_start_idx = its_lines.index(cfg_str) | ||
816 | line_idx = cfg_start_idx + 2 | ||
817 | node_end = False | ||
818 | while node_end == False: | ||
819 | if its_lines[line_idx] == "};" and its_lines[line_idx-1] == "};" : | ||
820 | node_end = True | ||
821 | line_idx = line_idx + 1 | ||
822 | |||
823 | node = its_lines[cfg_start_idx:line_idx] | ||
824 | print("checking configuration " + cfg_str.rstrip(" {")) | ||
825 | rx_desc_line = re.compile("^description.*1 Linux kernel.*") | ||
826 | if len(list(filter(rx_desc_line.match, node))) != 1: | ||
827 | self.assertTrue(test_passed == True,"kernel keyword not found in the description line") | ||
828 | break | ||
829 | else: | ||
830 | print("kernel keyword found in the description line") | ||
831 | 1692 | ||
832 | if 'kernel = "kernel-1";' not in node: | 1693 | # Just check the DTB of u-boot since there is no u-boot FIT image |
833 | self.assertTrue(test_passed == True,"kernel line not found") | 1694 | self._check_kernel_dtb(bb_vars) |
834 | break | ||
835 | else: | ||
836 | print("kernel line found") | ||
837 | 1695 | ||
838 | rx_sign_line = re.compile("^sign-images.*kernel.*") | ||
839 | if len(list(filter(rx_sign_line.match, node))) != 1: | ||
840 | self.assertTrue(test_passed == True,"kernel hash not signed") | ||
841 | break | ||
842 | else: | ||
843 | print("kernel hash signed") | ||
844 | 1696 | ||
845 | test_passed = True | 1697 | def test_sign_uboot_fit_image_without_spl(self): |
846 | self.assertTrue(test_passed == True,"Initramfs bundle test success") | 1698 | """ |
1699 | Summary: Check if U-Boot FIT image and Image Tree Source (its) are | ||
1700 | created and signed correctly for the scenario where only | ||
1701 | the U-Boot proper fitImage is being created and signed | ||
1702 | (no SPL included). | ||
1703 | Expected: 1) U-Boot its and FIT image are built successfully | ||
1704 | 2) Scanning the its file indicates signing is enabled | ||
1705 | as requested by SPL_SIGN_ENABLE (using keys generated | ||
1706 | via UBOOT_FIT_GENERATE_KEYS) | ||
1707 | 3) Dumping the FIT image indicates signature values | ||
1708 | are present | ||
1709 | 4) Examination of the do_uboot_assemble_fitimage | ||
1710 | runfile/logfile indicate that UBOOT_MKIMAGE and | ||
1711 | UBOOT_MKIMAGE_SIGN are working as expected. | ||
1712 | Product: oe-core | ||
1713 | Author: Jamin Lin <jamin_lin@aspeedtech.com> | ||
1714 | """ | ||
1715 | config = """ | ||
1716 | # There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at | ||
1717 | # least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set | ||
1718 | MACHINE = "qemuarm" | ||
1719 | UBOOT_MACHINE = "am57xx_evm_defconfig" | ||
1720 | # Enable creation and signing of the U-Boot fitImage (no SPL) | ||
1721 | UBOOT_FITIMAGE_ENABLE = "1" | ||
1722 | SPL_DTB_BINARY = "" | ||
1723 | SPL_SIGN_ENABLE = "1" | ||
1724 | SPL_SIGN_KEYNAME = "spl-oe-selftest" | ||
1725 | SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys" | ||
1726 | UBOOT_FIT_GENERATE_KEYS = "1" | ||
1727 | """ | ||
1728 | self.write_config(config) | ||
1729 | bb_vars = self._fit_get_bb_vars() | ||
1730 | self._test_fitimage(bb_vars) | ||
1731 | |||
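_check_kernel_dtb, used by test_sign_uboot_kernel_individual above, is another shared helper outside this hunk. What it must establish: with FIT_SIGN_INDIVIDUAL = "1", the U-Boot control DTB carries two public keys under its /signature node, one for the image nodes and one for the configuration section. The same check can be reproduced by hand with fdtget from dtc (key node names follow the config in that test; treat the snippet as illustrative):

    import subprocess

    def list_signature_keys(uboot_dtb):
        # mkimage stores verification keys as key-<name> subnodes of
        # /signature in the control device tree.
        out = subprocess.run(['fdtget', '-l', uboot_dtb, '/signature'],
                             capture_output=True, text=True, check=True)
        return out.stdout.split()

    # Expected: ['key-the-kernel-image-key', 'key-the-kernel-config-key']

With the duplication gone, each scenario also stays individually runnable, e.g.:

    $ oe-selftest -r fitimage.UBootFitImageTests.test_sign_uboot_fit_image_without_spl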
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py index 89360178fe..1bda29a72b 100644 --- a/meta/lib/oeqa/selftest/cases/gcc.py +++ b/meta/lib/oeqa/selftest/cases/gcc.py | |||
@@ -37,7 +37,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
37 | features = [] | 37 | features = [] |
38 | features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets))) | 38 | features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets))) |
39 | if ssh is not None: | 39 | if ssh is not None: |
40 | features.append('TOOLCHAIN_TEST_TARGET = "ssh"') | 40 | features.append('TOOLCHAIN_TEST_TARGET = "linux-ssh"') |
41 | features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh)) | 41 | features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh)) |
42 | features.append('TOOLCHAIN_TEST_HOST_USER = "root"') | 42 | features.append('TOOLCHAIN_TEST_HOST_USER = "root"') |
43 | features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') | 43 | features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') |
@@ -83,6 +83,8 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
83 | # validate that SSH is working | 83 | # validate that SSH is working |
84 | status, _ = qemu.run("uname") | 84 | status, _ = qemu.run("uname") |
85 | self.assertEqual(status, 0) | 85 | self.assertEqual(status, 0) |
86 | qemu.run('echo "MaxStartups 75:30:100" >> /etc/ssh/sshd_config') | ||
87 | qemu.run('service sshd restart') | ||
86 | 88 | ||
87 | return self.run_check(*args, ssh=qemu.ip, **kwargs) | 89 | return self.run_check(*args, ssh=qemu.ip, **kwargs) |
88 | 90 | ||
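The gcc test suites open many SSH connections to the target in parallel. With OpenSSH's default MaxStartups of 10:30:100, sshd starts probabilistically refusing new unauthenticated connections once ten are pending, which surfaces as sporadic test failures; the raised limit pushes that throttling out of reach:

    # sshd_config MaxStartups is start:rate:full -- refuse new
    # unauthenticated connections with 30% probability once 75 are
    # pending, rising linearly to 100% at 100 pending connections.
    MaxStartups 75:30:100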
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py index 9da97ae780..b6b7c5c473 100644 --- a/meta/lib/oeqa/selftest/cases/gdbserver.py +++ b/meta/lib/oeqa/selftest/cases/gdbserver.py | |||
@@ -54,7 +54,7 @@ CORE_IMAGE_EXTRA_INSTALL = "gdbserver" | |||
54 | self.logger.warning("starting gdb %s" % cmd) | 54 | self.logger.warning("starting gdb %s" % cmd) |
55 | r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys) | 55 | r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys) |
56 | self.assertEqual(0, r.status) | 56 | self.assertEqual(0, r.status) |
57 | line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>" | 57 | line_re = r"Line \d+ of \".*\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>" |
58 | self.assertRegex(r.output, line_re) | 58 | self.assertRegex(r.output, line_re) |
59 | break | 59 | break |
60 | else: | 60 | else: |
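The old pattern hard-coded the /usr/src/debug/kmod/... source path, which breaks whenever the debug source remapping changes; anchoring on the function symbol is sufficient. Illustrative match against typical gdb "info line" output:

    import re
    line_re = r"Line \d+ of \".*\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
    sample = 'Line 63 of "kmod.c" starts at address 0x2f1c <kmod_help>'
    assert re.search(line_re, sample)  # matches wherever the source landed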
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py index dc88c222bd..94d01ba116 100644 --- a/meta/lib/oeqa/selftest/cases/imagefeatures.py +++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py | |||
@@ -250,12 +250,7 @@ USERADD_GID_TABLES += "files/static-group" | |||
250 | DISTRO_FEATURES:append = " pam opengl wayland" | 250 | DISTRO_FEATURES:append = " pam opengl wayland" |
251 | 251 | ||
252 | # Switch to systemd | 252 | # Switch to systemd |
253 | DISTRO_FEATURES:append = " systemd usrmerge" | 253 | INIT_MANAGER = "systemd" |
254 | VIRTUAL-RUNTIME_init_manager = "systemd" | ||
255 | VIRTUAL-RUNTIME_initscripts = "" | ||
256 | VIRTUAL-RUNTIME_syslog = "" | ||
257 | VIRTUAL-RUNTIME_login_manager = "shadow-base" | ||
258 | DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit" | ||
259 | 254 | ||
260 | # Replace busybox | 255 | # Replace busybox |
261 | PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils" | 256 | PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils" |
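INIT_MANAGER = "systemd" pulls in conf/distro/include/init-manager-systemd.inc, which takes care of the settings the deleted lines spelled out by hand. Reconstructed from the removed lines (the include file is the authoritative list):

    DISTRO_FEATURES:append = " systemd usrmerge"
    VIRTUAL-RUNTIME_init_manager = "systemd"
    VIRTUAL-RUNTIME_initscripts = ""
    VIRTUAL-RUNTIME_syslog = ""
    VIRTUAL-RUNTIME_login_manager = "shadow-base"
    DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"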
@@ -319,7 +314,7 @@ SKIP_RECIPE[busybox] = "Don't build this" | |||
319 | """ | 314 | """ |
320 | config = """ | 315 | config = """ |
321 | DISTRO_FEATURES:append = " api-documentation" | 316 | DISTRO_FEATURES:append = " api-documentation" |
322 | CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc" | 317 | CORE_IMAGE_EXTRA_INSTALL = "man-pages" |
323 | """ | 318 | """ |
324 | self.write_config(config) | 319 | self.write_config(config) |
325 | bitbake("core-image-minimal") | 320 | bitbake("core-image-minimal") |
@@ -330,7 +325,7 @@ CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc" | |||
330 | self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output)) | 325 | self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output)) |
331 | self.assertIn("iso_8859_15", output) | 326 | self.assertIn("iso_8859_15", output) |
332 | 327 | ||
333 | # This manpage is provided by kmod | 328 | # This manpage is provided by man-pages |
334 | status, output = qemu.run_serial("man --pager=cat modprobe") | 329 | status, output = qemu.run_serial("man --pager=cat intro") |
335 | self.assertEqual(status, 1, 'Failed to run man: %s' % (output)) | 330 | self.assertEqual(status, 1, 'Failed to run man: %s' % (output)) |
336 | self.assertIn("force-modversion", output) | 331 | self.assertIn("introduction to user commands", output) |
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py index f4af67a239..93884f5731 100644 --- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py +++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py | |||
@@ -102,6 +102,7 @@ class IncompatibleLicensePerImageTests(OESelftestTestCase): | |||
102 | return """ | 102 | return """ |
103 | IMAGE_INSTALL:append = " bash" | 103 | IMAGE_INSTALL:append = " bash" |
104 | INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" | 104 | INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" |
105 | MACHINE_ESSENTIAL_EXTRA_RDEPENDS:remove = "tar" | ||
105 | """ | 106 | """ |
106 | 107 | ||
107 | def test_bash_default(self): | 108 | def test_bash_default(self): |
@@ -114,7 +115,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" | |||
114 | 115 | ||
115 | def test_bash_and_license(self): | 116 | def test_bash_and_license(self): |
116 | self.disable_class("create-spdx") | 117 | self.disable_class("create-spdx") |
117 | self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"') | 118 | self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"') |
118 | error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" | 119 | error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" |
119 | 120 | ||
120 | result = bitbake('core-image-minimal', ignore_status=True) | 121 | result = bitbake('core-image-minimal', ignore_status=True) |
@@ -123,12 +124,12 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" | |||
123 | 124 | ||
124 | def test_bash_or_license(self): | 125 | def test_bash_or_license(self): |
125 | self.disable_class("create-spdx") | 126 | self.disable_class("create-spdx") |
126 | self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"') | 127 | self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"\nERROR_QA:remove:pn-core-image-minimal = "license-file-missing"') |
127 | 128 | ||
128 | bitbake('core-image-minimal') | 129 | bitbake('core-image-minimal') |
129 | 130 | ||
130 | def test_bash_license_exceptions(self): | 131 | def test_bash_license_exceptions(self): |
131 | self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"') | 132 | self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"\nERROR_QA:remove:pn-core-image-minimal = "license-exception"') |
132 | 133 | ||
133 | bitbake('core-image-minimal') | 134 | bitbake('core-image-minimal') |
134 | 135 | ||
@@ -136,6 +137,8 @@ class NoGPL3InImagesTests(OESelftestTestCase): | |||
136 | def test_core_image_minimal(self): | 137 | def test_core_image_minimal(self): |
137 | self.write_config(""" | 138 | self.write_config(""" |
138 | INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" | 139 | INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*" |
140 | |||
141 | require conf/distro/include/no-gplv3.inc | ||
139 | """) | 142 | """) |
140 | bitbake('core-image-minimal') | 143 | bitbake('core-image-minimal') |
141 | 144 | ||
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py index d5ffffdcb4..930354c931 100644 --- a/meta/lib/oeqa/selftest/cases/liboe.py +++ b/meta/lib/oeqa/selftest/cases/liboe.py | |||
@@ -9,11 +9,11 @@ from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd | |||
9 | import oe.path | 9 | import oe.path |
10 | import os | 10 | import os |
11 | 11 | ||
12 | class LibOE(OESelftestTestCase): | 12 | class CopyTreeTests(OESelftestTestCase): |
13 | 13 | ||
14 | @classmethod | 14 | @classmethod |
15 | def setUpClass(cls): | 15 | def setUpClass(cls): |
16 | super(LibOE, cls).setUpClass() | 16 | super().setUpClass() |
17 | cls.tmp_dir = get_bb_var('TMPDIR') | 17 | cls.tmp_dir = get_bb_var('TMPDIR') |
18 | 18 | ||
19 | def test_copy_tree_special(self): | 19 | def test_copy_tree_special(self): |
@@ -102,3 +102,36 @@ class LibOE(OESelftestTestCase): | |||
102 | self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt)) | 102 | self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt)) |
103 | 103 | ||
104 | oe.path.remove(testloc) | 104 | oe.path.remove(testloc) |
105 | |||
106 | class SubprocessTests(OESelftestTestCase): | ||
107 | |||
108 | def test_subprocess_tweak(self): | ||
109 | """ | ||
110 | Test that the string representation of | ||
111 | oeqa.utils.subprocesstweak.OETestCalledProcessError includes stdout and | ||
112 | stderr, as expected. | ||
113 | """ | ||
114 | script = """ | ||
115 | #! /bin/sh | ||
116 | echo Ivn fgqbhg | tr '[a-zA-Z]' '[n-za-mN-ZA-M]' | ||
117 | echo Ivn fgqree | tr '[a-zA-Z]' '[n-za-mN-ZA-M]' >&2 | ||
118 | exit 42 | ||
119 | """ | ||
120 | |||
121 | import subprocess | ||
122 | import unittest.mock | ||
123 | from oeqa.utils.subprocesstweak import OETestCalledProcessError | ||
124 | |||
125 | with self.assertRaises(OETestCalledProcessError) as cm: | ||
126 | with unittest.mock.patch("subprocess.CalledProcessError", OETestCalledProcessError): | ||
127 | subprocess.run(["bash", "-"], input=script, text=True, capture_output=True, check=True) | ||
128 | |||
129 | e = cm.exception | ||
130 | self.assertEqual(e.returncode, 42) | ||
131 | self.assertEqual("Via stdout\n", e.stdout) | ||
132 | self.assertEqual("Via stderr\n", e.stderr) | ||
133 | |||
134 | string = str(e) | ||
135 | self.assertIn("exit status 42", string) | ||
136 | self.assertIn("Standard Output: Via stdout", string) | ||
137 | self.assertIn("Standard Error: Via stderr", string) | ||
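The tr mapping in the shell script is ROT13, so the markers cannot collide with real build output yet decode to exactly the strings the assertions expect:

    import codecs
    print(codecs.encode("Ivn fgqbhg", "rot13"))  # -> Via stdout
    print(codecs.encode("Ivn fgqree", "rot13"))  # -> Via stderr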
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py index 4ca8ffb7aa..ac4888ef66 100644 --- a/meta/lib/oeqa/selftest/cases/locales.py +++ b/meta/lib/oeqa/selftest/cases/locales.py | |||
@@ -14,7 +14,7 @@ class LocalesTest(OESelftestTestCase): | |||
14 | features = [] | 14 | features = [] |
15 | features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"') | 15 | features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"') |
16 | features.append('IMAGE_INSTALL:append = " glibc-utils localedef"') | 16 | features.append('IMAGE_INSTALL:append = " glibc-utils localedef"') |
17 | features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"') | 17 | features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8 en_US.ISO-8859-1 de_DE.UTF-8 fr_FR.ISO-8859-1 zh_HK.BIG5-HKSCS tr_TR.UTF-8"') |
18 | features.append('IMAGE_LINGUAS:append = " en-us fr-fr"') | 18 | features.append('IMAGE_LINGUAS:append = " en-us fr-fr"') |
19 | if binary_enabled: | 19 | if binary_enabled: |
20 | features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"') | 20 | features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"') |
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py index ffe0d2604d..c3a7df4cdf 100644 --- a/meta/lib/oeqa/selftest/cases/meta_ide.py +++ b/meta/lib/oeqa/selftest/cases/meta_ide.py | |||
@@ -20,8 +20,8 @@ class MetaIDE(OESelftestTestCase): | |||
20 | bitbake('meta-ide-support') | 20 | bitbake('meta-ide-support') |
21 | bitbake('build-sysroots -c build_native_sysroot') | 21 | bitbake('build-sysroots -c build_native_sysroot') |
22 | bitbake('build-sysroots -c build_target_sysroot') | 22 | bitbake('build-sysroots -c build_target_sysroot') |
23 | bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE']) | 23 | bb_vars = get_bb_vars(['MACHINE_ARCH', 'TARGET_VENDOR', 'TARGET_OS', 'DEPLOY_DIR_IMAGE', 'COREBASE']) |
24 | cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] | 24 | cls.environment_script = 'environment-setup-%s%s-%s' % (bb_vars['MACHINE_ARCH'], bb_vars['TARGET_VENDOR'], bb_vars['TARGET_OS']) |
25 | cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE'] | 25 | cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE'] |
26 | cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script) | 26 | cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script) |
27 | cls.corebasedir = bb_vars['COREBASE'] | 27 | cls.corebasedir = bb_vars['COREBASE'] |
@@ -47,9 +47,9 @@ class MetaIDE(OESelftestTestCase): | |||
47 | "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz", | 47 | "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz", |
48 | self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir) | 48 | self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir) |
49 | self.project.download_archive() | 49 | self.project.download_archive() |
50 | self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0, | 50 | self.assertEqual(self.project.run_configure('CFLAGS="-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration" $CONFIGURE_FLAGS'), 0, |
51 | msg="Running configure failed") | 51 | msg="Running configure failed") |
52 | self.assertEqual(self.project.run_make(), 0, | 52 | self.assertEqual(self.project.run_make(make_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'"), 0, |
53 | msg="Running make failed") | 53 | msg="Running make failed") |
54 | self.assertEqual(self.project.run_install(), 0, | 54 | self.assertEqual(self.project.run_install(), 0, |
55 | msg="Running make install failed") | 55 | msg="Running make install failed") |
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py index 2919f07939..a8923460f9 100644 --- a/meta/lib/oeqa/selftest/cases/minidebuginfo.py +++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py | |||
@@ -8,6 +8,7 @@ import subprocess | |||
8 | import tempfile | 8 | import tempfile |
9 | import shutil | 9 | import shutil |
10 | 10 | ||
11 | from oeqa.core.decorator import OETestTag | ||
11 | from oeqa.selftest.case import OESelftestTestCase | 12 | from oeqa.selftest.case import OESelftestTestCase |
12 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd | 13 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd |
13 | 14 | ||
@@ -42,3 +43,18 @@ IMAGE_FSTYPES = "tar.bz2" | |||
42 | native_sysroot = native_sysroot, target_sys = target_sys) | 43 | native_sysroot = native_sysroot, target_sys = target_sys) |
43 | self.assertIn(".gnu_debugdata", r.output) | 44 | self.assertIn(".gnu_debugdata", r.output) |
44 | 45 | ||
46 | @OETestTag("runqemu") | ||
47 | def test_minidebuginfo_qemu(self): | ||
48 | """ | ||
49 | Test minidebuginfo inside a qemu. | ||
50 | This runs test_systemd_coredump_minidebuginfo and other minidebuginfo runtime tests which may be added in the future. | ||
51 | """ | ||
52 | |||
53 | self.write_config(""" | ||
54 | DISTRO_FEATURES:append = " minidebuginfo" | ||
55 | INIT_MANAGER = "systemd" | ||
56 | IMAGE_CLASSES += "testimage" | ||
57 | TEST_SUITES = "ping ssh systemd" | ||
58 | """) | ||
59 | bitbake('core-image-minimal') | ||
60 | bitbake('-c testimage core-image-minimal') | ||
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py index f69efccfee..3f9899b289 100644 --- a/meta/lib/oeqa/selftest/cases/oescripts.py +++ b/meta/lib/oeqa/selftest/cases/oescripts.py | |||
@@ -9,33 +9,9 @@ import shutil | |||
9 | import importlib | 9 | import importlib |
10 | import unittest | 10 | import unittest |
11 | from oeqa.selftest.case import OESelftestTestCase | 11 | from oeqa.selftest.case import OESelftestTestCase |
12 | from oeqa.selftest.cases.buildhistory import BuildhistoryBase | ||
13 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var | 12 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var |
14 | from oeqa.utils import CommandError | 13 | from oeqa.utils import CommandError |
15 | 14 | ||
16 | class BuildhistoryDiffTests(BuildhistoryBase): | ||
17 | |||
18 | def test_buildhistory_diff(self): | ||
19 | target = 'xcursor-transparent-theme' | ||
20 | self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) | ||
21 | self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True) | ||
22 | result = runCmd("oe-pkgdata-util read-value PKGV %s" % target) | ||
23 | pkgv = result.output.rstrip() | ||
24 | result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR')) | ||
25 | expected_endlines = [ | ||
26 | "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv), | ||
27 | "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv) | ||
28 | ] | ||
29 | for line in result.output.splitlines(): | ||
30 | for el in expected_endlines: | ||
31 | if line.endswith(el): | ||
32 | expected_endlines.remove(el) | ||
33 | break | ||
34 | else: | ||
35 | self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines))) | ||
36 | if expected_endlines: | ||
37 | self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) | ||
38 | |||
39 | @unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present") | 15 | @unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present") |
40 | class OEPybootchartguyTests(OESelftestTestCase): | 16 | class OEPybootchartguyTests(OESelftestTestCase): |
41 | 17 | ||
@@ -175,7 +151,7 @@ class OEListPackageconfigTests(OESelftestTestCase): | |||
175 | def test_packageconfig_flags_option_all(self): | 151 | def test_packageconfig_flags_option_all(self): |
176 | results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) | 152 | results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) |
177 | expected_endlines = [] | 153 | expected_endlines = [] |
178 | expected_endlines.append("pinentry-1.2.1") | 154 | expected_endlines.append("pinentry-1.3.1") |
179 | expected_endlines.append("PACKAGECONFIG ncurses") | 155 | expected_endlines.append("PACKAGECONFIG ncurses") |
180 | expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") | 156 | expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") |
181 | expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") | 157 | expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") |
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py index e31063567b..580fbdcb9c 100644 --- a/meta/lib/oeqa/selftest/cases/overlayfs.py +++ b/meta/lib/oeqa/selftest/cases/overlayfs.py | |||
@@ -5,7 +5,7 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
8 | from oeqa.utils.commands import bitbake, runqemu | 8 | from oeqa.utils.commands import bitbake, runqemu, get_bb_vars |
9 | from oeqa.core.decorator import OETestTag | 9 | from oeqa.core.decorator import OETestTag |
10 | from oeqa.core.decorator.data import skipIfNotMachine | 10 | from oeqa.core.decorator.data import skipIfNotMachine |
11 | 11 | ||
@@ -466,6 +466,45 @@ IMAGE_INSTALL:append = " overlayfs-user" | |||
466 | line = getline_qemu(output, "Read-only file system") | 466 | line = getline_qemu(output, "Read-only file system") |
467 | self.assertTrue(line, msg=output) | 467 | self.assertTrue(line, msg=output) |
468 | 468 | ||
469 | @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently") | ||
470 | def test_postinst_on_target_for_read_only_rootfs(self): | ||
471 | """ | ||
472 | Summary: The purpose of this test case is to verify that post-installation | ||
473 | on target scripts are executed even if using read-only rootfs when | ||
474 | read-only-rootfs-delayed-postinsts is set | ||
475 | Expected: The test files are created on first boot | ||
476 | """ | ||
477 | |||
478 | import oe.path | ||
479 | |||
480 | vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal") | ||
481 | sysconfdir = vars["sysconfdir"] | ||
482 | self.assertIsNotNone(sysconfdir) | ||
483 | # Need to use oe.path here as sysconfdir starts with / | ||
484 | targettestdir = os.path.join(sysconfdir, "postinst-test") | ||
485 | |||
486 | config = self.get_working_config() | ||
487 | |||
488 | args = { | ||
489 | 'OVERLAYFS_INIT_OPTION': "", | ||
490 | 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1, | ||
491 | 'OVERLAYFS_ROOTFS_TYPE': "ext4", | ||
492 | 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1 | ||
493 | } | ||
494 | |||
495 | # read-only-rootfs is already set in get_working_config() | ||
496 | config += 'EXTRA_IMAGE_FEATURES += "read-only-rootfs-delayed-postinsts"\n' | ||
497 | config += 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n' | ||
498 | |||
499 | self.write_config(config.format(**args)) | ||
500 | |||
501 | res = bitbake('core-image-minimal') | ||
502 | |||
503 | with runqemu('core-image-minimal', image_fstype='wic') as qemu: | ||
504 | for filename in ("rootfs", "delayed-a", "delayed-b"): | ||
505 | status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename)) | ||
506 | self.assertIn("found", output, "%s was not present on boot" % filename) | ||
507 | |||
469 | def get_working_config(self): | 508 | def get_working_config(self): |
470 | return """ | 509 | return """ |
471 | # Use systemd as init manager | 510 | # Use systemd as init manager |
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py index 1aa6c03f8a..38ed7173fe 100644 --- a/meta/lib/oeqa/selftest/cases/package.py +++ b/meta/lib/oeqa/selftest/cases/package.py | |||
@@ -103,11 +103,37 @@ class PackageTests(OESelftestTestCase): | |||
103 | 103 | ||
104 | dest = get_bb_var('PKGDEST', 'selftest-hardlink') | 104 | dest = get_bb_var('PKGDEST', 'selftest-hardlink') |
105 | bindir = get_bb_var('bindir', 'selftest-hardlink') | 105 | bindir = get_bb_var('bindir', 'selftest-hardlink') |
106 | libdir = get_bb_var('libdir', 'selftest-hardlink') | ||
107 | libexecdir = get_bb_var('libexecdir', 'selftest-hardlink') | ||
106 | 108 | ||
107 | def checkfiles(): | 109 | def checkfiles(): |
108 | # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/ | 110 | # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/ |
109 | # so expect 8 in total. | 111 | # so expect 8 in total. |
110 | self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8) | 112 | self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8) |
113 | self.assertEqual(os.stat(dest + "/selftest-hardlink" + libexecdir + "/hello3").st_nlink, 8) | ||
114 | |||
115 | # Check dbg version | ||
116 | # 2 items, a copy in both package/packages-split so 4 | ||
117 | self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + bindir + "/.debug/hello1").st_nlink, 4) | ||
118 | self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello1").st_nlink, 4) | ||
119 | |||
120 | # Even though the libexecdir name is 'hello3' or 'hello4', that isn't the debug target name | ||
121 | self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello3"), False) | ||
122 | self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello4"), False) | ||
123 | |||
124 | # Check the staticdev libraries | ||
125 | # 101 items, a copy in both package/packages-split so 202 | ||
126 | self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello.a").st_nlink, 202) | ||
127 | self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-25.a").st_nlink, 202) | ||
128 | self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-50.a").st_nlink, 202) | ||
129 | self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-75.a").st_nlink, 202) | ||
130 | |||
131 | # Check static dbg | ||
132 | # 101 items, a copy in both package/packages-split so 202 | ||
133 | self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello.a").st_nlink, 202) | ||
134 | self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-25.a").st_nlink, 202) | ||
135 | self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-50.a").st_nlink, 202) | ||
136 | self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-75.a").st_nlink, 202) | ||
111 | 137 | ||
112 | # Test a sparse file remains sparse | 138 | # Test a sparse file remains sparse |
113 | sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest") | 139 | sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest") |
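The expected link counts follow from how do_package stages files: every hardlink set exists once under package/ and once under packages-split/, so each staged st_nlink is double the number of names the recipe created (4 names -> 8, 101 names -> 202). A quick sanity check of the arithmetic (paths hypothetical):

    import os

    def staged_nlink(path):
        # st_nlink counts every directory entry that points at the inode.
        return os.stat(path).st_nlink

    # e.g. staged_nlink(dest + '/selftest-hardlink-staticdev' + libdir
    #                   + '/libhello.a') == 2 * 101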
diff --git a/meta/lib/oeqa/selftest/cases/picolibc.py b/meta/lib/oeqa/selftest/cases/picolibc.py new file mode 100644 index 0000000000..e40b4fc3d3 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/picolibc.py | |||
@@ -0,0 +1,18 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | from oeqa.selftest.case import OESelftestTestCase | ||
8 | from oeqa.utils.commands import bitbake, get_bb_var | ||
9 | |||
10 | class PicolibcTest(OESelftestTestCase): | ||
11 | |||
12 | def test_picolibc(self): | ||
13 | compatible_machines = ['qemuarm', 'qemuarm64', 'qemuriscv32', 'qemuriscv64'] | ||
14 | machine = get_bb_var('MACHINE') | ||
15 | if machine not in compatible_machines: | ||
16 | self.skipTest('This test only works with machines : %s' % ' '.join(compatible_machines)) | ||
17 | self.write_config('TCLIBC = "picolibc"') | ||
18 | bitbake("picolibc-helloworld") | ||
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py index 42202b7831..0bd724c8ee 100644 --- a/meta/lib/oeqa/selftest/cases/recipetool.py +++ b/meta/lib/oeqa/selftest/cases/recipetool.py | |||
@@ -385,7 +385,7 @@ class RecipetoolCreateTests(RecipetoolBase): | |||
385 | checkvars = {} | 385 | checkvars = {} |
386 | checkvars['LICENSE'] = 'LGPL-2.1-only' | 386 | checkvars['LICENSE'] = 'LGPL-2.1-only' |
387 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34' | 387 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34' |
388 | checkvars['S'] = '${WORKDIR}/git' | 388 | checkvars['S'] = None |
389 | checkvars['PV'] = '1.11+git' | 389 | checkvars['PV'] = '1.11+git' |
390 | checkvars['SRC_URI'] = srcuri + ';branch=master' | 390 | checkvars['SRC_URI'] = srcuri + ';branch=master' |
391 | checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango']) | 391 | checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango']) |
@@ -757,235 +757,42 @@ class RecipetoolCreateTests(RecipetoolBase): | |||
757 | 757 | ||
758 | def test_recipetool_create_go(self): | 758 | def test_recipetool_create_go(self): |
759 | # Basic test to check go recipe generation | 759 | # Basic test to check go recipe generation |
760 | self.maxDiff = None | ||
761 | |||
760 | temprecipe = os.path.join(self.tempdir, 'recipe') | 762 | temprecipe = os.path.join(self.tempdir, 'recipe') |
761 | os.makedirs(temprecipe) | 763 | os.makedirs(temprecipe) |
762 | 764 | ||
763 | recipefile = os.path.join(temprecipe, 'edgex-go_git.bb') | 765 | recipefile = os.path.join(temprecipe, 'recipetool-go-test_git.bb') |
764 | deps_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-modules.inc') | ||
765 | lics_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-licenses.inc') | ||
766 | modules_txt_file = os.path.join(temprecipe, 'edgex-go', 'modules.txt') | ||
767 | 766 | ||
768 | srcuri = 'https://github.com/edgexfoundry/edgex-go.git' | 767 | srcuri = 'https://git.yoctoproject.org/recipetool-go-test.git' |
769 | srcrev = "v3.0.0" | 768 | srcrev = "c3e213c01b6c1406b430df03ef0d1ae77de5d2f7" |
770 | srcbranch = "main" | 769 | srcbranch = "main" |
771 | 770 | ||
772 | result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch)) | 771 | result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch)) |
773 | 772 | ||
774 | self.maxDiff = None | 773 | inherits = ['go-mod', 'go-mod-update-modules'] |
775 | inherits = ['go-vendor'] | ||
776 | 774 | ||
777 | checkvars = {} | 775 | checkvars = {} |
778 | checkvars['GO_IMPORT'] = "github.com/edgexfoundry/edgex-go" | 776 | checkvars['GO_IMPORT'] = "git.yoctoproject.org/recipetool-go-test" |
779 | checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https', | 777 | checkvars['SRC_URI'] = {'git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'} |
780 | 'file://modules.txt'} | 778 | checkvars['LIC_FILES_CHKSUM'] = { |
781 | checkvars['LIC_FILES_CHKSUM'] = {'file://src/${GO_IMPORT}/LICENSE;md5=8f8bc924cf73f6a32381e5fd4c58d603'} | 779 | 'file://src/${GO_IMPORT}/LICENSE;md5=4e3933dd47afbf115e484d11385fb3bd', |
780 | 'file://src/${GO_IMPORT}/is/LICENSE;md5=62beaee5a116dd1e80161667b1df39ab' | ||
781 | } | ||
782 | 782 | ||
783 | self.assertTrue(os.path.isfile(recipefile)) | ||
784 | self._test_recipe_contents(recipefile, checkvars, inherits) | 783 | self._test_recipe_contents(recipefile, checkvars, inherits) |
784 | self.assertNotIn('Traceback', result.output) | ||
785 | 785 | ||
786 | lics_require_file = os.path.join(temprecipe, 'recipetool-go-test-licenses.inc') | ||
787 | self.assertFileExists(lics_require_file) | ||
786 | checkvars = {} | 788 | checkvars = {} |
787 | checkvars['VENDORED_LIC_FILES_CHKSUM'] = set( | 789 | checkvars['LIC_FILES_CHKSUM'] = {'file://pkg/mod/github.com/godbus/dbus/v5@v5.1.0/LICENSE;md5=09042bd5c6c96a2b9e45ddf1bc517eed;spdx=BSD-2-Clause'} |
788 | ['file://src/${GO_IMPORT}/vendor/github.com/Microsoft/go-winio/LICENSE;md5=69205ff73858f2c22b2ca135b557e8ef', | ||
789 | 'file://src/${GO_IMPORT}/vendor/github.com/armon/go-metrics/LICENSE;md5=d2d77030c0183e3d1e66d26dc1f243be', | ||
790 | 'file://src/${GO_IMPORT}/vendor/github.com/cenkalti/backoff/LICENSE;md5=1571d94433e3f3aa05267efd4dbea68b', | ||
791 | 'file://src/${GO_IMPORT}/vendor/github.com/davecgh/go-spew/LICENSE;md5=c06795ed54b2a35ebeeb543cd3a73e56', | ||
792 | 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE;md5=dcdb33474b60c38efd27356d8f2edec7', | ||
793 | 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10;md5=3adfcc70f5aeb7a44f3f9b495aa1fbf3', | ||
794 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-bootstrap/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
795 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-configuration/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
796 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-core-contracts/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
797 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-messaging/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
798 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-registry/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff', | ||
799 | 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-secrets/v3/LICENSE;md5=f9fa2f4f8e0ef8cc7b5dd150963eb457', | ||
800 | 'file://src/${GO_IMPORT}/vendor/github.com/fatih/color/LICENSE.md;md5=316e6d590bdcde7993fb175662c0dd5a', | ||
801 | 'file://src/${GO_IMPORT}/vendor/github.com/fxamacker/cbor/v2/LICENSE;md5=827f5a2fa861382d35a3943adf9ebb86', | ||
802 | 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57', | ||
803 | 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/json/LICENSE;md5=591778525c869cdde0ab5a1bf283cd81', | ||
804 | 'file://src/${GO_IMPORT}/vendor/github.com/go-kit/log/LICENSE;md5=5b7c15ad5fffe2ff6e9d58a6c161f082', | ||
805 | 'file://src/${GO_IMPORT}/vendor/github.com/go-logfmt/logfmt/LICENSE;md5=98e39517c38127f969de33057067091e', | ||
806 | 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/locales/LICENSE;md5=3ccbda375ee345400ad1da85ba522301', | ||
807 | 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/universal-translator/LICENSE;md5=2e2b21ef8f61057977d27c727c84bef1', | ||
808 | 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/validator/v10/LICENSE;md5=a718a0f318d76f7c5d510cbae84f0b60', | ||
809 | 'file://src/${GO_IMPORT}/vendor/github.com/go-redis/redis/v7/LICENSE;md5=58103aa5ea1ee9b7a369c9c4a95ef9b5', | ||
810 | 'file://src/${GO_IMPORT}/vendor/github.com/golang/protobuf/LICENSE;md5=939cce1ec101726fa754e698ac871622', | ||
811 | 'file://src/${GO_IMPORT}/vendor/github.com/gomodule/redigo/LICENSE;md5=2ee41112a44fe7014dce33e26468ba93', | ||
812 | 'file://src/${GO_IMPORT}/vendor/github.com/google/uuid/LICENSE;md5=88073b6dd8ec00fe09da59e0b6dfded1', | ||
813 | 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/mux/LICENSE;md5=33fa1116c45f9e8de714033f99edde13', | ||
814 | 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/websocket/LICENSE;md5=c007b54a1743d596f46b2748d9f8c044', | ||
815 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/consul/api/LICENSE;md5=b8a277a612171b7526e9be072f405ef4', | ||
816 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/errwrap/LICENSE;md5=b278a92d2c1509760384428817710378', | ||
817 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-cleanhttp/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea', | ||
818 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-hclog/LICENSE;md5=ec7f605b74b9ad03347d0a93a5cc7eb8', | ||
819 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-immutable-radix/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea', | ||
820 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-multierror/LICENSE;md5=d44fdeb607e2d2614db9464dbedd4094', | ||
821 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-rootcerts/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea', | ||
822 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/golang-lru/LICENSE;md5=f27a50d2e878867827842f2c60e30bfc', | ||
823 | 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/serf/LICENSE;md5=b278a92d2c1509760384428817710378', | ||
824 | 'file://src/${GO_IMPORT}/vendor/github.com/leodido/go-urn/LICENSE;md5=8f50db5538ec1148a9b3d14ed96c3418', | ||
825 | 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-colorable/LICENSE;md5=24ce168f90aec2456a73de1839037245', | ||
826 | 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-isatty/LICENSE;md5=f509beadd5a11227c27b5d2ad6c9f2c6', | ||
827 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/consulstructure/LICENSE;md5=96ada10a9e51c98c4656f2cede08c673', | ||
828 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/copystructure/LICENSE;md5=56da355a12d4821cda57b8f23ec34bc4', | ||
829 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/go-homedir/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd', | ||
830 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/mapstructure/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd', | ||
831 | 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/reflectwalk/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd', | ||
832 | 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nats.go/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
833 | 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nkeys/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
834 | 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nuid/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
835 | 'file://src/${GO_IMPORT}/vendor/github.com/pmezard/go-difflib/LICENSE;md5=e9a2ebb8de779a07500ddecca806145e', | ||
836 | 'file://src/${GO_IMPORT}/vendor/github.com/rcrowley/go-metrics/LICENSE;md5=1bdf5d819f50f141366dabce3be1460f', | ||
837 | 'file://src/${GO_IMPORT}/vendor/github.com/spiffe/go-spiffe/v2/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327', | ||
838 | 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/objx/LICENSE;md5=d023fd31d3ca39ec61eec65a91732735', | ||
839 | 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/testify/LICENSE;md5=188f01994659f3c0d310612333d2a26f', | ||
840 | 'file://src/${GO_IMPORT}/vendor/github.com/x448/float16/LICENSE;md5=de8f8e025d57fe7ee0b67f30d571323b', | ||
841 | 'file://src/${GO_IMPORT}/vendor/github.com/zeebo/errs/LICENSE;md5=84914ab36fc0eb48edbaa53e66e8d326', | ||
842 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/crypto/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
843 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/mod/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
844 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/net/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
845 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/sync/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
846 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/sys/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
847 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/text/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
848 | 'file://src/${GO_IMPORT}/vendor/golang.org/x/tools/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707', | ||
849 | 'file://src/${GO_IMPORT}/vendor/google.golang.org/genproto/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57', | ||
850 | 'file://src/${GO_IMPORT}/vendor/google.golang.org/grpc/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57', | ||
851 | 'file://src/${GO_IMPORT}/vendor/google.golang.org/protobuf/LICENSE;md5=02d4002e9171d41a8fad93aa7faf3956', | ||
852 | 'file://src/${GO_IMPORT}/vendor/gopkg.in/eapache/queue.v1/LICENSE;md5=1bfd4408d3de090ef6b908b0cc45a316', | ||
853 | 'file://src/${GO_IMPORT}/vendor/gopkg.in/yaml.v3/LICENSE;md5=3c91c17266710e16afdbb2b6d15c761c']) | ||
854 | |||
855 | self.assertTrue(os.path.isfile(lics_require_file)) | ||
856 | self._test_recipe_contents(lics_require_file, checkvars, []) | 790 | self._test_recipe_contents(lics_require_file, checkvars, []) |
857 | 791 | ||
858 | dependencies = \ | 792 | deps_require_file = os.path.join(temprecipe, 'recipetool-go-test-go-mods.inc') |
859 | [ ('github.com/eclipse/paho.mqtt.golang','v1.4.2', '', '', ''), | 793 | self.assertFileExists(deps_require_file) |
860 | ('github.com/edgexfoundry/go-mod-bootstrap','v3.0.1','github.com/edgexfoundry/go-mod-bootstrap/v3','/v3', ''), | ||
861 | ('github.com/edgexfoundry/go-mod-configuration','v3.0.0','github.com/edgexfoundry/go-mod-configuration/v3','/v3', ''), | ||
862 | ('github.com/edgexfoundry/go-mod-core-contracts','v3.0.0','github.com/edgexfoundry/go-mod-core-contracts/v3','/v3', ''), | ||
863 | ('github.com/edgexfoundry/go-mod-messaging','v3.0.0','github.com/edgexfoundry/go-mod-messaging/v3','/v3', ''), | ||
864 | ('github.com/edgexfoundry/go-mod-secrets','v3.0.1','github.com/edgexfoundry/go-mod-secrets/v3','/v3', ''), | ||
865 | ('github.com/fxamacker/cbor','v2.4.0','github.com/fxamacker/cbor/v2','/v2', ''), | ||
866 | ('github.com/gomodule/redigo','v1.8.9', '', '', ''), | ||
867 | ('github.com/google/uuid','v1.3.0', '', '', ''), | ||
868 | ('github.com/gorilla/mux','v1.8.0', '', '', ''), | ||
869 | ('github.com/rcrowley/go-metrics','v0.0.0-20201227073835-cf1acfcdf475', '', '', ''), | ||
870 | ('github.com/spiffe/go-spiffe','v2.1.4','github.com/spiffe/go-spiffe/v2','/v2', ''), | ||
871 | ('github.com/stretchr/testify','v1.8.2', '', '', ''), | ||
872 | ('go.googlesource.com/crypto','v0.8.0','golang.org/x/crypto', '', ''), | ||
873 | ('gopkg.in/eapache/queue.v1','v1.1.0', '', '', ''), | ||
874 | ('gopkg.in/yaml.v3','v3.0.1', '', '', ''), | ||
875 | ('github.com/microsoft/go-winio','v0.6.0','github.com/Microsoft/go-winio', '', ''), | ||
876 | ('github.com/hashicorp/go-metrics','v0.3.10','github.com/armon/go-metrics', '', ''), | ||
877 | ('github.com/cenkalti/backoff','v2.2.1+incompatible', '', '', ''), | ||
878 | ('github.com/davecgh/go-spew','v1.1.1', '', '', ''), | ||
879 | ('github.com/edgexfoundry/go-mod-registry','v3.0.0','github.com/edgexfoundry/go-mod-registry/v3','/v3', ''), | ||
880 | ('github.com/fatih/color','v1.9.0', '', '', ''), | ||
881 | ('github.com/go-jose/go-jose','v3.0.0','github.com/go-jose/go-jose/v3','/v3', ''), | ||
882 | ('github.com/go-kit/log','v0.2.1', '', '', ''), | ||
883 | ('github.com/go-logfmt/logfmt','v0.5.1', '', '', ''), | ||
884 | ('github.com/go-playground/locales','v0.14.1', '', '', ''), | ||
885 | ('github.com/go-playground/universal-translator','v0.18.1', '', '', ''), | ||
886 | ('github.com/go-playground/validator','v10.13.0','github.com/go-playground/validator/v10','/v10', ''), | ||
887 | ('github.com/go-redis/redis','v7.3.0','github.com/go-redis/redis/v7','/v7', ''), | ||
888 | ('github.com/golang/protobuf','v1.5.2', '', '', ''), | ||
889 | ('github.com/gorilla/websocket','v1.4.2', '', '', ''), | ||
890 | ('github.com/hashicorp/consul','v1.20.0','github.com/hashicorp/consul/api', '', 'api'), | ||
891 | ('github.com/hashicorp/errwrap','v1.0.0', '', '', ''), | ||
892 | ('github.com/hashicorp/go-cleanhttp','v0.5.1', '', '', ''), | ||
893 | ('github.com/hashicorp/go-hclog','v0.14.1', '', '', ''), | ||
894 | ('github.com/hashicorp/go-immutable-radix','v1.3.0', '', '', ''), | ||
895 | ('github.com/hashicorp/go-multierror','v1.1.1', '', '', ''), | ||
896 | ('github.com/hashicorp/go-rootcerts','v1.0.2', '', '', ''), | ||
897 | ('github.com/hashicorp/golang-lru','v0.5.4', '', '', ''), | ||
898 | ('github.com/hashicorp/serf','v0.10.1', '', '', ''), | ||
899 | ('github.com/leodido/go-urn','v1.2.3', '', '', ''), | ||
900 | ('github.com/mattn/go-colorable','v0.1.12', '', '', ''), | ||
901 | ('github.com/mattn/go-isatty','v0.0.14', '', '', ''), | ||
902 | ('github.com/mitchellh/consulstructure','v0.0.0-20190329231841-56fdc4d2da54', '', '', ''), | ||
903 | ('github.com/mitchellh/copystructure','v1.2.0', '', '', ''), | ||
904 | ('github.com/mitchellh/go-homedir','v1.1.0', '', '', ''), | ||
905 | ('github.com/mitchellh/mapstructure','v1.5.0', '', '', ''), | ||
906 | ('github.com/mitchellh/reflectwalk','v1.0.2', '', '', ''), | ||
907 | ('github.com/nats-io/nats.go','v1.25.0', '', '', ''), | ||
908 | ('github.com/nats-io/nkeys','v0.4.4', '', '', ''), | ||
909 | ('github.com/nats-io/nuid','v1.0.1', '', '', ''), | ||
910 | ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''), | ||
911 | ('github.com/stretchr/objx','v0.5.0', '', '', ''), | ||
912 | ('github.com/x448/float16','v0.8.4', '', '', ''), | ||
913 | ('github.com/zeebo/errs','v1.3.0', '', '', ''), | ||
914 | ('go.googlesource.com/mod','v0.8.0','golang.org/x/mod', '', ''), | ||
915 | ('go.googlesource.com/net','v0.9.0','golang.org/x/net', '', ''), | ||
916 | ('go.googlesource.com/sync','v0.1.0','golang.org/x/sync', '', ''), | ||
917 | ('go.googlesource.com/sys','v0.7.0','golang.org/x/sys', '', ''), | ||
918 | ('go.googlesource.com/text','v0.9.0','golang.org/x/text', '', ''), | ||
919 | ('go.googlesource.com/tools','v0.6.0','golang.org/x/tools', '', ''), | ||
920 | ('github.com/googleapis/go-genproto','v0.0.0-20230223222841-637eb2293923','google.golang.org/genproto', '', ''), | ||
921 | ('github.com/grpc/grpc-go','v1.53.0','google.golang.org/grpc', '', ''), | ||
922 | ('go.googlesource.com/protobuf','v1.28.1','google.golang.org/protobuf', '', ''), | ||
923 | ] | ||
924 | |||
925 | src_uri = set() | ||
926 | for d in dependencies: | ||
927 | src_uri.add(self._go_urifiy(*d)) | ||
928 | |||
929 | checkvars = {} | ||
930 | checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri | ||
931 | |||
932 | self.assertTrue(os.path.isfile(deps_require_file)) | ||
933 | self._test_recipe_contents(deps_require_file, checkvars, []) | ||
934 | |||
935 | def test_recipetool_create_go_replace_modules(self): | ||
936 | # Check handling of replaced modules | ||
937 | temprecipe = os.path.join(self.tempdir, 'recipe') | ||
938 | os.makedirs(temprecipe) | ||
939 | |||
940 | recipefile = os.path.join(temprecipe, 'openapi-generator_git.bb') | ||
941 | deps_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-modules.inc') | ||
942 | lics_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-licenses.inc') | ||
943 | modules_txt_file = os.path.join(temprecipe, 'openapi-generator', 'modules.txt') | ||
944 | |||
945 | srcuri = 'https://github.com/OpenAPITools/openapi-generator.git' | ||
946 | srcrev = "v7.2.0" | ||
947 | srcbranch = "master" | ||
948 | srcsubdir = "samples/openapi3/client/petstore/go" | ||
949 | |||
950 | result = runCmd('recipetool create -o %s %s -S %s -B %s --src-subdir %s' % (temprecipe, srcuri, srcrev, srcbranch, srcsubdir)) | ||
951 | |||
952 | self.maxDiff = None | ||
953 | inherits = ['go-vendor'] | ||
954 | |||
955 | checkvars = {} | ||
956 | checkvars['GO_IMPORT'] = "github.com/OpenAPITools/openapi-generator/samples/openapi3/client/petstore/go" | ||
957 | checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https', | ||
958 | 'file://modules.txt'} | ||
959 | |||
960 | self.assertNotIn('Traceback', result.output) | ||
961 | self.assertIn('No license file was detected for the main module', result.output) | ||
962 | self.assertTrue(os.path.isfile(recipefile)) | ||
963 | self._test_recipe_contents(recipefile, checkvars, inherits) | ||
964 | |||
965 | # make sure that dependencies don't mention local directory ./go-petstore | ||
966 | dependencies = \ | ||
967 | [ ('github.com/stretchr/testify','v1.8.4', '', '', ''), | ||
968 | ('go.googlesource.com/oauth2','v0.10.0','golang.org/x/oauth2', '', ''), | ||
969 | ('github.com/davecgh/go-spew','v1.1.1', '', '', ''), | ||
970 | ('github.com/golang/protobuf','v1.5.3', '', '', ''), | ||
971 | ('github.com/kr/pretty','v0.3.0', '', '', ''), | ||
972 | ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''), | ||
973 | ('github.com/rogpeppe/go-internal','v1.9.0', '', '', ''), | ||
974 | ('go.googlesource.com/net','v0.12.0','golang.org/x/net', '', ''), | ||
975 | ('github.com/golang/appengine','v1.6.7','google.golang.org/appengine', '', ''), | ||
976 | ('go.googlesource.com/protobuf','v1.31.0','google.golang.org/protobuf', '', ''), | ||
977 | ('gopkg.in/check.v1','v1.0.0-20201130134442-10cb98267c6c', '', '', ''), | ||
978 | ('gopkg.in/yaml.v3','v3.0.1', '', '', ''), | ||
979 | ] | ||
980 | |||
981 | src_uri = set() | ||
982 | for d in dependencies: | ||
983 | src_uri.add(self._go_urifiy(*d)) | ||
984 | |||
985 | checkvars = {} | 794 | checkvars = {} |
986 | checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri | 795 | checkvars['SRC_URI'] = {'gomod://github.com/godbus/dbus/v5;version=v5.1.0;sha256sum=03dfa8e71089a6f477310d15c4d3a036d82d028532881b50fee254358e782ad9'} |
987 | |||
988 | self.assertTrue(os.path.isfile(deps_require_file)) | ||
989 | self._test_recipe_contents(deps_require_file, checkvars, []) | 796 | self._test_recipe_contents(deps_require_file, checkvars, []) |
990 | 797 | ||
991 | class RecipetoolTests(RecipetoolBase): | 798 | class RecipetoolTests(RecipetoolBase): |
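The rewritten test expects the gomod fetcher syntax for dependencies rather than the old vendored-modules layout. A small hedged helper showing how such a SRC_URI entry is assembled (format copied from the expected value in this hunk; the function name is illustrative):

```python
def gomod_uri(module, version, sha256sum):
    """Build a gomod:// SRC_URI entry of the form expected by the test."""
    return 'gomod://%s;version=%s;sha256sum=%s' % (module, version, sha256sum)

# Reproduces the entry checked in recipetool-go-test-go-mods.inc above.
print(gomod_uri('github.com/godbus/dbus/v5', 'v5.1.0',
                '03dfa8e71089a6f477310d15c4d3a036d82d028532881b50fee254358e782ad9'))
```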
@@ -1068,6 +875,7 @@ class RecipetoolTests(RecipetoolBase): | |||
1068 | 875 | ||
1069 | d = DataConnectorCopy | 876 | d = DataConnectorCopy |
1070 | d.getVar = Mock(return_value=commonlicdir) | 877 | d.getVar = Mock(return_value=commonlicdir) |
878 | d.expand = Mock(side_effect=lambda x: x) | ||
1071 | 879 | ||
1072 | srctree = tempfile.mkdtemp(prefix='recipetoolqa') | 880 | srctree = tempfile.mkdtemp(prefix='recipetoolqa') |
1073 | self.track_for_cleanup(srctree) | 881 | self.track_for_cleanup(srctree) |
@@ -1323,10 +1131,10 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase): | |||
1323 | 1131 | ||
1324 | def test_recipetool_appendsrcfile_srcdir_basic(self): | 1132 | def test_recipetool_appendsrcfile_srcdir_basic(self): |
1325 | testrecipe = 'bash' | 1133 | testrecipe = 'bash' |
1326 | bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) | 1134 | bb_vars = get_bb_vars(['S', 'UNPACKDIR'], testrecipe) |
1327 | srcdir = bb_vars['S'] | 1135 | srcdir = bb_vars['S'] |
1328 | workdir = bb_vars['WORKDIR'] | 1136 | unpackdir = bb_vars['UNPACKDIR'] |
1329 | subdir = os.path.relpath(srcdir, workdir) | 1137 | subdir = os.path.relpath(srcdir, unpackdir) |
1330 | self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir) | 1138 | self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir) |
1331 | 1139 | ||
1332 | def test_recipetool_appendsrcfile_existing_in_src_uri(self): | 1140 | def test_recipetool_appendsrcfile_existing_in_src_uri(self): |
@@ -1375,10 +1183,10 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase): | |||
1375 | def test_recipetool_appendsrcfile_replace_file_srcdir(self): | 1183 | def test_recipetool_appendsrcfile_replace_file_srcdir(self): |
1376 | testrecipe = 'bash' | 1184 | testrecipe = 'bash' |
1377 | filepath = 'Makefile.in' | 1185 | filepath = 'Makefile.in' |
1378 | bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) | 1186 | bb_vars = get_bb_vars(['S', 'UNPACKDIR'], testrecipe) |
1379 | srcdir = bb_vars['S'] | 1187 | srcdir = bb_vars['S'] |
1380 | workdir = bb_vars['WORKDIR'] | 1188 | unpackdir = bb_vars['UNPACKDIR'] |
1381 | subdir = os.path.relpath(srcdir, workdir) | 1189 | subdir = os.path.relpath(srcdir, unpackdir) |
1382 | 1190 | ||
1383 | self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir) | 1191 | self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir) |
1384 | bitbake('%s:do_unpack' % testrecipe) | 1192 | bitbake('%s:do_unpack' % testrecipe) |
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py index 9949737172..e697fd2920 100644 --- a/meta/lib/oeqa/selftest/cases/recipeutils.py +++ b/meta/lib/oeqa/selftest/cases/recipeutils.py | |||
@@ -72,7 +72,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
72 | expected_patch = """ | 72 | expected_patch = """ |
73 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb | 73 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb |
74 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb | 74 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb |
75 | @@ -11,6 +11,4 @@ | 75 | @@ -10,6 +10,4 @@ |
76 | 76 | ||
77 | BBCLASSEXTEND = "native nativesdk" | 77 | BBCLASSEXTEND = "native nativesdk" |
78 | 78 | ||
@@ -97,7 +97,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
97 | expected_patch = """ | 97 | expected_patch = """ |
98 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb | 98 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb |
99 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb | 99 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb |
100 | @@ -11,6 +11,3 @@ | 100 | @@ -10,6 +10,3 @@ |
101 | 101 | ||
102 | BBCLASSEXTEND = "native nativesdk" | 102 | BBCLASSEXTEND = "native nativesdk" |
103 | 103 | ||
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py index 80e830136f..f06027cb03 100644 --- a/meta/lib/oeqa/selftest/cases/reproducible.py +++ b/meta/lib/oeqa/selftest/cases/reproducible.py | |||
@@ -97,8 +97,10 @@ def compare_file(reference, test, diffutils_sysroot): | |||
97 | result.status = SAME | 97 | result.status = SAME |
98 | return result | 98 | return result |
99 | 99 | ||
100 | def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, **kwargs): | 100 | def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, max_diff_block_lines=1024, max_diff_block_lines_saved=0, **kwargs): |
101 | return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size), | 101 | return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size), |
102 | '--max-diff-block-lines-saved', str(max_diff_block_lines_saved), | ||
103 | '--max-diff-block-lines', str(max_diff_block_lines), | ||
102 | '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], | 104 | '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], |
103 | **kwargs) | 105 | **kwargs) |
104 | 106 | ||
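For reference, the flags added to the wrapper map directly onto diffoscope's command line; a self-contained sketch of the equivalent direct invocation (assumes diffoscope is on PATH; the argument list mirrors run_diffoscope above, minus the native-sysroot plumbing that oeqa's runCmd adds):

```python
import subprocess

def diffoscope_dirs(a_dir, b_dir, html_dir, max_report_size=0,
                    max_diff_block_lines=1024, max_diff_block_lines_saved=0):
    # Same flags run_diffoscope() passes through runCmd.
    cmd = ['diffoscope', '--no-default-limits',
           '--max-report-size', str(max_report_size),
           '--max-diff-block-lines-saved', str(max_diff_block_lines_saved),
           '--max-diff-block-lines', str(max_diff_block_lines),
           '--exclude-directory-metadata', 'yes',
           '--html-dir', html_dir, a_dir, b_dir]
    return subprocess.run(cmd, check=False)
```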
@@ -132,8 +134,14 @@ class ReproducibleTests(OESelftestTestCase): | |||
132 | # Maximum report size, in bytes | 134 | # Maximum report size, in bytes |
133 | max_report_size = 250 * 1024 * 1024 | 135 | max_report_size = 250 * 1024 * 1024 |
134 | 136 | ||
137 | # Maximum diff blocks size, in lines | ||
138 | max_diff_block_lines = 1024 | ||
139 | # Maximum diff blocks size (saved in memory), in lines | ||
140 | max_diff_block_lines_saved = max_diff_block_lines | ||
141 | |||
135 | # targets are the things we want to test the reproducibility of | 142 | # targets are the things we want to test the reproducibility of |
136 | targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] | 143 | # Have to add the virtual targets manually for now as builds may or may not include them as they're excluded from world |
144 | targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world', 'virtual/librpc', 'virtual/libsdl2', 'virtual/crypt'] | ||
137 | 145 | ||
138 | # sstate targets are things to pull from sstate to potentially cut build/debugging time | 146 | # sstate targets are things to pull from sstate to potentially cut build/debugging time |
139 | sstate_targets = [] | 147 | sstate_targets = [] |
@@ -161,6 +169,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
161 | 'OEQA_REPRODUCIBLE_TEST_TARGET', | 169 | 'OEQA_REPRODUCIBLE_TEST_TARGET', |
162 | 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS', | 170 | 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS', |
163 | 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES', | 171 | 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES', |
172 | 'OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS', | ||
164 | ] | 173 | ] |
165 | bb_vars = get_bb_vars(needed_vars) | 174 | bb_vars = get_bb_vars(needed_vars) |
166 | for v in needed_vars: | 175 | for v in needed_vars: |
@@ -169,19 +178,20 @@ class ReproducibleTests(OESelftestTestCase): | |||
169 | if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']: | 178 | if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']: |
170 | self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split() | 179 | self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split() |
171 | 180 | ||
172 | if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']: | 181 | if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'] or bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS']: |
173 | self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split() | 182 | self.targets = (bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'] or "").split() + (bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS'] or "").split() |
174 | 183 | ||
175 | if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']: | 184 | if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']: |
176 | self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split() | 185 | self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split() |
177 | 186 | ||
187 | if bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS']: | ||
188 | # Setup to build every DEPENDS of leaf recipes using sstate | ||
189 | for leaf_recipe in bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS'].split(): | ||
190 | self.sstate_targets.extend(get_bb_var('DEPENDS', leaf_recipe).split()) | ||
191 | |||
178 | self.extraresults = {} | 192 | self.extraresults = {} |
179 | self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = '' | ||
180 | self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) | 193 | self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) |
181 | 194 | ||
182 | def append_to_log(self, msg): | ||
183 | self.extraresults['reproducible.rawlogs']['log'] += msg | ||
184 | |||
185 | def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): | 195 | def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): |
186 | result = PackageCompareResults(self.oeqa_reproducible_excluded_packages) | 196 | result = PackageCompareResults(self.oeqa_reproducible_excluded_packages) |
187 | 197 | ||
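The new OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS variable trades coverage for speed: only the named leaf recipes are rebuilt from scratch, while everything in their DEPENDS is pre-populated via sstate. A hedged local.conf sketch in the write_config() string idiom the tests use (recipe and package class choices are illustrative):

```python
# Hypothetical configuration for a quick single-recipe reproducibility run.
config = 'OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS = "zstd"\n'   # rebuilt from scratch
config += 'OEQA_REPRODUCIBLE_TEST_PACKAGE = "ipk"\n'        # compare ipk packaging only
```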
@@ -208,7 +218,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
208 | 218 | ||
209 | def write_package_list(self, package_class, name, packages): | 219 | def write_package_list(self, package_class, name, packages): |
210 | self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [ | 220 | self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [ |
211 | {'reference': p.reference, 'test': p.test} for p in packages] | 221 | p.reference.split("/./")[1] for p in packages] |
212 | 222 | ||
213 | def copy_file(self, source, dest): | 223 | def copy_file(self, source, dest): |
214 | bb.utils.mkdirhier(os.path.dirname(dest)) | 224 | bb.utils.mkdirhier(os.path.dirname(dest)) |
@@ -220,7 +230,6 @@ class ReproducibleTests(OESelftestTestCase): | |||
220 | tmpdir = os.path.join(self.topdir, name, 'tmp') | 230 | tmpdir = os.path.join(self.topdir, name, 'tmp') |
221 | if os.path.exists(tmpdir): | 231 | if os.path.exists(tmpdir): |
222 | bb.utils.remove(tmpdir, recurse=True) | 232 | bb.utils.remove(tmpdir, recurse=True) |
223 | |||
224 | config = textwrap.dedent('''\ | 233 | config = textwrap.dedent('''\ |
225 | PACKAGE_CLASSES = "{package_classes}" | 234 | PACKAGE_CLASSES = "{package_classes}" |
226 | TMPDIR = "{tmpdir}" | 235 | TMPDIR = "{tmpdir}" |
@@ -233,11 +242,41 @@ class ReproducibleTests(OESelftestTestCase): | |||
233 | ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes), | 242 | ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes), |
234 | tmpdir=tmpdir) | 243 | tmpdir=tmpdir) |
235 | 244 | ||
245 | # Export BB_CONSOLELOG to the calling function and make it constant to | ||
246 | # avoid a case where bitbake would get a timestamp-based filename but | ||
247 | # oe-selftest would, later, get another. | ||
248 | capture_vars.append("BB_CONSOLELOG") | ||
249 | config += 'BB_CONSOLELOG = "${LOG_DIR}/cooker/${MACHINE}/console.log"\n' | ||
250 | |||
251 | # We want different log files for each build, but a persistent bitbake | ||
252 | # may reuse the previous log file, so restart the bitbake server. | ||
253 | bitbake("--kill-server") | ||
254 | |||
255 | def print_condensed_error_log(logs, context_lines=10, tail_lines=20): | ||
256 | """Prints errors with context and the end of the log.""" | ||
257 | |||
258 | logs = logs.split("\n") | ||
259 | for i, line in enumerate(logs): | ||
260 | if line.startswith("ERROR"): | ||
261 | self.logger.info("Found ERROR (line %d):" % (i + 1)) | ||
262 | for l in logs[max(0, i-context_lines):i+context_lines]: | ||
263 | self.logger.info(" " + l) | ||
264 | |||
265 | self.logger.info("End of log:") | ||
266 | for l in logs[-tail_lines:]: | ||
267 | self.logger.info(" " + l) | ||
268 | |||
269 | bitbake_failure_count = 0 | ||
236 | if not use_sstate: | 270 | if not use_sstate: |
237 | if self.sstate_targets: | 271 | if self.sstate_targets: |
238 | self.logger.info("Building prebuild for %s (sstate allowed)..." % (name)) | 272 | self.logger.info("Building prebuild for %s (sstate allowed)..." % (name)) |
239 | self.write_config(config) | 273 | self.write_config(config) |
240 | bitbake(' '.join(self.sstate_targets)) | 274 | try: |
275 | bitbake("--continue "+' '.join(self.sstate_targets)) | ||
276 | except AssertionError as e: | ||
277 | bitbake_failure_count += 1 | ||
278 | self.logger.error("Bitbake failed, but continuing... Log:") | ||
279 | print_condensed_error_log(str(e)) | ||
241 | 280 | ||
242 | # This config fragment will disable using shared and the sstate | 281 | # This config fragment will disable using shared and the sstate |
243 | # mirror, forcing a complete build from scratch | 282 | # mirror, forcing a complete build from scratch |
@@ -249,9 +288,24 @@ class ReproducibleTests(OESelftestTestCase): | |||
249 | self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) | 288 | self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) |
250 | self.write_config(config) | 289 | self.write_config(config) |
251 | d = get_bb_vars(capture_vars) | 290 | d = get_bb_vars(capture_vars) |
252 | # targets used to be called images | 291 | try: |
253 | bitbake(' '.join(getattr(self, 'images', self.targets))) | 292 | # targets used to be called images |
254 | return d | 293 | bitbake("--continue "+' '.join(getattr(self, 'images', self.targets))) |
294 | except AssertionError as e: | ||
295 | bitbake_failure_count += 1 | ||
296 | self.logger.error("Bitbake failed, but continuing... Log:") | ||
297 | print_condensed_error_log(str(e)) | ||
298 | |||
299 | # The calling function expects the existence of the deploy | ||
300 | # directories containing the packages. | ||
301 | # If bitbake failed to create them, do it manually | ||
302 | for c in self.package_classes: | ||
303 | deploy = d['DEPLOY_DIR_' + c.upper()] | ||
304 | if not os.path.exists(deploy): | ||
305 | self.logger.info("Manually creating %s" % deploy) | ||
306 | bb.utils.mkdirhier(deploy) | ||
307 | |||
308 | return (d, bitbake_failure_count) | ||
255 | 309 | ||
256 | def test_reproducible_builds(self): | 310 | def test_reproducible_builds(self): |
257 | def strip_topdir(s): | 311 | def strip_topdir(s): |
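The condensed-log helper introduced above is plain string handling, so it can be exercised standalone; a minimal sketch using print instead of self.logger (the max(0, ...) guard keeps the context slice from wrapping when an ERROR sits in the first few lines):

```python
def condensed_error_log(logs, context_lines=10, tail_lines=20):
    """Print each ERROR line with surrounding context, then the log tail."""
    lines = logs.split("\n")
    for i, line in enumerate(lines):
        if line.startswith("ERROR"):
            print("Found ERROR (line %d):" % (i + 1))
            for l in lines[max(0, i - context_lines):i + context_lines]:
                print("  " + l)
    print("End of log:")
    for l in lines[-tail_lines:]:
        print("  " + l)

condensed_error_log("NOTE: task started\nERROR: oops, task failed\nNOTE: build finished")
```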
@@ -273,15 +327,30 @@ class ReproducibleTests(OESelftestTestCase): | |||
273 | os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) | 327 | os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) |
274 | self.logger.info('Non-reproducible packages will be copied to %s', save_dir) | 328 | self.logger.info('Non-reproducible packages will be copied to %s', save_dir) |
275 | 329 | ||
276 | vars_A = self.do_test_build('reproducibleA', self.build_from_sstate) | 330 | # The bug below shows that a few reproducibility issues depend on the build dir path length. |
331 | # https://bugzilla.yoctoproject.org/show_bug.cgi?id=15554 | ||
332 | # So the reproducibleA & reproducibleB directories are renamed reproducibleA & reproducibleB-extended so their path lengths differ. | ||
277 | 333 | ||
278 | vars_B = self.do_test_build('reproducibleB', False) | 334 | fails = [] |
335 | vars_list = [None, None] | ||
336 | |||
337 | for i, (name, use_sstate) in enumerate( | ||
338 | (('reproducibleA', self.build_from_sstate), | ||
339 | ('reproducibleB-extended', False))): | ||
340 | (variables, bitbake_failure_count) = self.do_test_build(name, use_sstate) | ||
341 | if bitbake_failure_count > 0: | ||
342 | self.logger.error('%s build failed. Trying to compute built package differences, but the test will fail.' % name) | ||
343 | fails.append("Bitbake %s failure" % name) | ||
344 | if self.save_results: | ||
345 | failure_log_path = os.path.join(save_dir, "bitbake-%s.log" % name) | ||
346 | self.logger.info('Failure log for %s will be copied to %s' % (name, failure_log_path)) | ||
347 | self.copy_file(variables["BB_CONSOLELOG"], failure_log_path) | ||
348 | vars_list[i] = variables | ||
279 | 349 | ||
350 | vars_A, vars_B = vars_list | ||
280 | # NOTE: The temp directories from the reproducible build are purposely | 351 | # NOTE: The temp directories from the reproducible build are purposely |
281 | # kept after the build so it can be diffed for debugging. | 352 | # kept after the build so it can be diffed for debugging. |
282 | 353 | ||
283 | fails = [] | ||
284 | |||
285 | for c in self.package_classes: | 354 | for c in self.package_classes: |
286 | with self.subTest(package_class=c): | 355 | with self.subTest(package_class=c): |
287 | package_class = 'package_' + c | 356 | package_class = 'package_' + c |
@@ -294,8 +363,6 @@ class ReproducibleTests(OESelftestTestCase): | |||
294 | 363 | ||
295 | self.logger.info('Reproducibility summary for %s: %s' % (c, result)) | 364 | self.logger.info('Reproducibility summary for %s: %s' % (c, result)) |
296 | 365 | ||
297 | self.append_to_log('\n'.join("%s: %s" % (r.status, r.test) for r in result.total)) | ||
298 | |||
299 | self.write_package_list(package_class, 'missing', result.missing) | 366 | self.write_package_list(package_class, 'missing', result.missing) |
300 | self.write_package_list(package_class, 'different', result.different) | 367 | self.write_package_list(package_class, 'different', result.different) |
301 | self.write_package_list(package_class, 'different_excluded', result.different_excluded) | 368 | self.write_package_list(package_class, 'different_excluded', result.different_excluded) |
@@ -330,7 +397,9 @@ class ReproducibleTests(OESelftestTestCase): | |||
330 | # Copy jquery to improve the diffoscope output usability | 397 | # Copy jquery to improve the diffoscope output usability |
331 | self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) | 398 | self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) |
332 | 399 | ||
333 | run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size, | 400 | run_diffoscope('reproducibleA', 'reproducibleB-extended', package_html_dir, max_report_size=self.max_report_size, |
401 | max_diff_block_lines_saved=self.max_diff_block_lines_saved, | ||
402 | max_diff_block_lines=self.max_diff_block_lines, | ||
334 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) | 403 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) |
335 | 404 | ||
336 | if fails: | 405 | if fails: |
diff --git a/meta/lib/oeqa/selftest/cases/retain.py b/meta/lib/oeqa/selftest/cases/retain.py new file mode 100644 index 0000000000..892be45857 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/retain.py | |||
@@ -0,0 +1,241 @@ | |||
1 | # Tests for retain.bbclass | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | # | ||
7 | |||
8 | import os | ||
9 | import glob | ||
10 | import fnmatch | ||
11 | import oe.path | ||
12 | import shutil | ||
13 | import tarfile | ||
14 | from oeqa.utils.commands import bitbake, get_bb_vars | ||
15 | from oeqa.selftest.case import OESelftestTestCase | ||
16 | |||
17 | class Retain(OESelftestTestCase): | ||
18 | |||
19 | def test_retain_always(self): | ||
20 | """ | ||
21 | Summary: Test retain class with RETAIN_DIRS_ALWAYS | ||
22 | Expected: Archive written to RETAIN_OUTDIR when build of test recipe completes | ||
23 | Product: oe-core | ||
24 | Author: Paul Eggleton <paul.eggleton@microsoft.com> | ||
25 | """ | ||
26 | |||
27 | test_recipe = 'quilt-native' | ||
28 | |||
29 | features = 'INHERIT += "retain"\n' | ||
30 | features += 'RETAIN_DIRS_ALWAYS = "${T}"\n' | ||
31 | self.write_config(features) | ||
32 | |||
33 | bitbake('-c clean %s' % test_recipe) | ||
34 | |||
35 | bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR']) | ||
36 | retain_outdir = bb_vars['RETAIN_OUTDIR'] or '' | ||
37 | tmpdir = bb_vars['TMPDIR'] | ||
38 | if len(retain_outdir) < 5: | ||
39 | self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir) | ||
40 | if not oe.path.is_path_parent(tmpdir, retain_outdir): | ||
41 | self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir)) | ||
42 | try: | ||
43 | shutil.rmtree(retain_outdir) | ||
44 | except FileNotFoundError: | ||
45 | pass | ||
46 | |||
47 | bitbake(test_recipe) | ||
48 | if not glob.glob(os.path.join(retain_outdir, '%s_temp_*.tar.gz' % test_recipe)): | ||
49 | self.fail('No output archive for %s created' % test_recipe) | ||
50 | |||
51 | |||
52 | def test_retain_failure(self): | ||
53 | """ | ||
54 | Summary: Test retain class default behaviour | ||
55 | Expected: Archive written to RETAIN_OUTDIR only when build of test | ||
56 | recipe fails, and archive contents are as expected | ||
57 | Product: oe-core | ||
58 | Author: Paul Eggleton <paul.eggleton@microsoft.com> | ||
59 | """ | ||
60 | |||
61 | test_recipe_fail = 'error' | ||
62 | |||
63 | features = 'INHERIT += "retain"\n' | ||
64 | self.write_config(features) | ||
65 | |||
66 | bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'RETAIN_DIRS_ALWAYS', 'RETAIN_DIRS_GLOBAL_ALWAYS']) | ||
67 | if bb_vars['RETAIN_DIRS_ALWAYS']: | ||
68 | self.fail('RETAIN_DIRS_ALWAYS is set, this interferes with the test') | ||
69 | if bb_vars['RETAIN_DIRS_GLOBAL_ALWAYS']: | ||
70 | self.fail('RETAIN_DIRS_GLOBAL_ALWAYS is set, this interferes with the test') | ||
71 | retain_outdir = bb_vars['RETAIN_OUTDIR'] or '' | ||
72 | tmpdir = bb_vars['TMPDIR'] | ||
73 | if len(retain_outdir) < 5: | ||
74 | self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir) | ||
75 | if not oe.path.is_path_parent(tmpdir, retain_outdir): | ||
76 | self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir)) | ||
77 | |||
78 | try: | ||
79 | shutil.rmtree(retain_outdir) | ||
80 | except FileNotFoundError: | ||
81 | pass | ||
82 | |||
83 | bitbake('-c clean %s' % test_recipe_fail) | ||
84 | |||
85 | if os.path.exists(retain_outdir): | ||
86 | retain_dirlist = os.listdir(retain_outdir) | ||
87 | if retain_dirlist: | ||
88 | self.fail('RETAIN_OUTDIR should be empty without failure, contents:\n%s' % '\n'.join(retain_dirlist)) | ||
89 | |||
90 | result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True) | ||
91 | if result.status == 0: | ||
92 | self.fail('Build of %s did not fail as expected' % test_recipe_fail) | ||
93 | |||
94 | archives = glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % test_recipe_fail)) | ||
95 | if not archives: | ||
96 | self.fail('No output archive for %s created' % test_recipe_fail) | ||
97 | if len(archives) > 1: | ||
98 | self.fail('More than one archive for %s created' % test_recipe_fail) | ||
99 | for archive in archives: | ||
100 | found = False | ||
101 | archive_prefix = os.path.basename(archive).split('.tar')[0] | ||
102 | expected_prefix_start = '%s_workdir' % test_recipe_fail | ||
103 | if not archive_prefix.startswith(expected_prefix_start): | ||
104 | self.fail('Archive %s name does not start with expected prefix "%s"' % (os.path.basename(archive), expected_prefix_start)) | ||
105 | with tarfile.open(archive) as tf: | ||
106 | for ti in tf: | ||
107 | if not fnmatch.fnmatch(ti.name, '%s/*' % archive_prefix): | ||
108 | self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name)) | ||
109 | if ti.name.endswith('/temp/log.do_compile'): | ||
110 | found = True | ||
111 | if not found: | ||
112 | self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive)) | ||
113 | |||
114 | |||
115 | def test_retain_global(self): | ||
116 | """ | ||
117 | Summary: Test retain class RETAIN_DIRS_GLOBAL_* behaviour | ||
118 | Expected: Ensure RETAIN_DIRS_GLOBAL_ALWAYS always causes an | ||
119 | archive to be created, and RETAIN_DIRS_GLOBAL_FAILURE | ||
120 | only causes an archive to be created on failure. | ||
121 | Also test archive naming (with : character) as an | ||
122 | added bonus. | ||
123 | Product: oe-core | ||
124 | Author: Paul Eggleton <paul.eggleton@microsoft.com> | ||
125 | """ | ||
126 | |||
127 | test_recipe = 'quilt-native' | ||
128 | test_recipe_fail = 'error' | ||
129 | |||
130 | features = 'INHERIT += "retain"\n' | ||
131 | features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n' | ||
132 | features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n' | ||
133 | self.write_config(features) | ||
134 | |||
135 | bitbake('-c clean %s' % test_recipe) | ||
136 | |||
137 | bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'STAMPS_DIR']) | ||
138 | retain_outdir = bb_vars['RETAIN_OUTDIR'] or '' | ||
139 | tmpdir = bb_vars['TMPDIR'] | ||
140 | if len(retain_outdir) < 5: | ||
141 | self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir) | ||
142 | if not oe.path.is_path_parent(tmpdir, retain_outdir): | ||
143 | self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir)) | ||
144 | try: | ||
145 | shutil.rmtree(retain_outdir) | ||
146 | except FileNotFoundError: | ||
147 | pass | ||
148 | |||
149 | # Test success case | ||
150 | bitbake(test_recipe) | ||
151 | if not glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz')): | ||
152 | self.fail('No output archive for LOG_DIR created') | ||
153 | stamps_dir = bb_vars['STAMPS_DIR'] | ||
154 | if glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))): | ||
155 | self.fail('Output archive for STAMPS_DIR created when it should not have been') | ||
156 | |||
157 | # Test failure case | ||
158 | result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True) | ||
159 | if result.status == 0: | ||
160 | self.fail('Build of %s did not fail as expected' % test_recipe_fail) | ||
161 | if not glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))): | ||
162 | self.fail('Output archive for STAMPS_DIR not created') | ||
163 | if len(glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz'))) != 2: | ||
164 | self.fail('Should be exactly two buildlogs archives in output dir') | ||
165 | |||
166 | |||
167 | def test_retain_misc(self): | ||
168 | """ | ||
169 | Summary: Test retain class with RETAIN_ENABLED and RETAIN_TARBALL_SUFFIX | ||
170 | Expected: Archive written to RETAIN_OUTDIR only when RETAIN_ENABLED is set | ||
171 | and archive contents are as expected. Also test archive naming | ||
172 | (with : character) as an added bonus. | ||
173 | Product: oe-core | ||
174 | Author: Paul Eggleton <paul.eggleton@microsoft.com> | ||
175 | """ | ||
176 | |||
177 | test_recipe_fail = 'error' | ||
178 | |||
179 | features = 'INHERIT += "retain"\n' | ||
180 | features += 'RETAIN_DIRS_ALWAYS = "${T}"\n' | ||
181 | features += 'RETAIN_ENABLED = "0"\n' | ||
182 | self.write_config(features) | ||
183 | |||
184 | bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR']) | ||
185 | retain_outdir = bb_vars['RETAIN_OUTDIR'] or '' | ||
186 | tmpdir = bb_vars['TMPDIR'] | ||
187 | if len(retain_outdir) < 5: | ||
188 | self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir) | ||
189 | if not oe.path.is_path_parent(tmpdir, retain_outdir): | ||
190 | self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir)) | ||
191 | |||
192 | try: | ||
193 | shutil.rmtree(retain_outdir) | ||
194 | except FileNotFoundError: | ||
195 | pass | ||
196 | |||
197 | bitbake('-c clean %s' % test_recipe_fail) | ||
198 | result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True) | ||
199 | if result.status == 0: | ||
200 | self.fail('Build of %s did not fail as expected' % test_recipe_fail) | ||
201 | |||
202 | if os.path.exists(retain_outdir) and os.listdir(retain_outdir): | ||
203 | self.fail('RETAIN_OUTDIR should be empty with RETAIN_ENABLED = "0"') | ||
204 | |||
205 | features = 'INHERIT += "retain"\n' | ||
206 | features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n' | ||
207 | features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n' | ||
208 | features += 'RETAIN_ENABLED = "1"\n' | ||
209 | self.write_config(features) | ||
210 | |||
211 | result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True) | ||
212 | if result.status == 0: | ||
213 | self.fail('Build of %s did not fail as expected' % test_recipe_fail) | ||
214 | |||
215 | archives = glob.glob(os.path.join(retain_outdir, '%s_*-testsuffix.tar.bz2' % test_recipe_fail)) | ||
216 | if not archives: | ||
217 | self.fail('No output archive for %s created' % test_recipe_fail) | ||
218 | if len(archives) != 2: | ||
219 | self.fail('Two archives for %s expected, but %d exist' % (test_recipe_fail, len(archives))) | ||
220 | recipelogs_found = False | ||
221 | workdir_found = False | ||
222 | for archive in archives: | ||
223 | contents_found = False | ||
224 | archive_prefix = os.path.basename(archive).split('.tar')[0] | ||
225 | if archive_prefix.startswith('%s_recipelogs' % test_recipe_fail): | ||
226 | recipelogs_found = True | ||
227 | if archive_prefix.startswith('%s_workdir' % test_recipe_fail): | ||
228 | workdir_found = True | ||
229 | with tarfile.open(archive, 'r:bz2') as tf: | ||
230 | for ti in tf: | ||
231 | if not fnmatch.fnmatch(ti.name, '%s/*' % (archive_prefix)): | ||
232 | self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name)) | ||
233 | if ti.name.endswith('/log.do_compile'): | ||
234 | contents_found = True | ||
235 | if not contents_found: | ||
236 | # Both archives should contain this file | ||
237 | self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive)) | ||
238 | if not recipelogs_found: | ||
239 | self.fail('No archive with expected "recipelogs" prefix found') | ||
240 | if not workdir_found: | ||
241 | self.fail('No archive with expected "workdir" prefix found') | ||
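Taken together, the four tests exercise a handful of retain.bbclass knobs; a hedged summary of a typical configuration in the same write_config() string idiom (values are illustrative, variable names come from the tests above):

```python
features = 'INHERIT += "retain"\n'
features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n'        # per-recipe, every build
features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n'
features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'         # only archived on failure
features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n'
features += 'RETAIN_ENABLED = "1"\n'                                 # set to "0" to disable
```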
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py index 13aa5f16c9..d58ffa80f5 100644 --- a/meta/lib/oeqa/selftest/cases/runtime_test.py +++ b/meta/lib/oeqa/selftest/cases/runtime_test.py | |||
@@ -174,7 +174,6 @@ TEST_RUNQEMUPARAMS += " slirp" | |||
174 | features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' | 174 | features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' |
175 | features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') | 175 | features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') |
176 | features += 'GPG_PATH = "%s"\n' % self.gpg_home | 176 | features += 'GPG_PATH = "%s"\n' % self.gpg_home |
177 | features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home | ||
178 | self.write_config(features) | 177 | self.write_config(features) |
179 | 178 | ||
180 | bitbake('core-image-full-cmdline socat') | 179 | bitbake('core-image-full-cmdline socat') |
@@ -211,7 +210,6 @@ TEST_RUNQEMUPARAMS += " slirp" | |||
211 | features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' | 210 | features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' |
212 | features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') | 211 | features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') |
213 | features += 'GPG_PATH = "%s"\n' % self.gpg_home | 212 | features += 'GPG_PATH = "%s"\n' % self.gpg_home |
214 | features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home | ||
215 | self.write_config(features) | 213 | self.write_config(features) |
216 | 214 | ||
217 | # Build core-image-sato and testimage | 215 | # Build core-image-sato and testimage |
@@ -273,6 +271,8 @@ TEST_RUNQEMUPARAMS += " slirp" | |||
273 | import subprocess, os | 271 | import subprocess, os |
274 | 272 | ||
275 | distro = oe.lsb.distro_identifier() | 273 | distro = oe.lsb.distro_identifier() |
274 | # Merge request to address the issue on centos/rhel/derivatives: | ||
275 | # https://gitlab.com/cki-project/kernel-ark/-/merge_requests/3449 | ||
276 | if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or | 276 | if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or |
277 | distro.startswith('almalinux') or distro.startswith('rocky')): | 277 | distro.startswith('almalinux') or distro.startswith('rocky')): |
278 | self.skipTest('virgl headless cannot be tested with %s' %(distro)) | 278 | self.skipTest('virgl headless cannot be tested with %s' %(distro)) |
@@ -310,10 +310,7 @@ class Postinst(OESelftestTestCase): | |||
310 | features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' | 310 | features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' |
311 | features += 'PACKAGE_CLASSES = "%s"\n' % classes | 311 | features += 'PACKAGE_CLASSES = "%s"\n' % classes |
312 | if init_manager == "systemd": | 312 | if init_manager == "systemd": |
313 | features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n' | 313 | features += 'INIT_MANAGER = "systemd"\n' |
314 | features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n' | ||
315 | features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n' | ||
316 | features += 'VIRTUAL-RUNTIME_initscripts = ""\n' | ||
317 | self.write_config(features) | 314 | self.write_config(features) |
318 | 315 | ||
319 | bitbake('core-image-minimal') | 316 | bitbake('core-image-minimal') |
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py index ad14189c6d..d99a58d6b9 100644 --- a/meta/lib/oeqa/selftest/cases/rust.py +++ b/meta/lib/oeqa/selftest/cases/rust.py | |||
@@ -1,11 +1,11 @@ | |||
1 | # SPDX-License-Identifier: MIT | 1 | # SPDX-License-Identifier: MIT |
2 | import os | ||
3 | import subprocess | 2 | import subprocess |
4 | import time | 3 | import time |
5 | from oeqa.core.decorator import OETestTag | 4 | from oeqa.core.decorator import OETestTag |
5 | from oeqa.core.decorator.data import skipIfArch | ||
6 | from oeqa.core.case import OEPTestResultTestCase | 6 | from oeqa.core.case import OEPTestResultTestCase |
7 | from oeqa.selftest.case import OESelftestTestCase | 7 | from oeqa.selftest.case import OESelftestTestCase |
8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command | 8 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu |
9 | from oeqa.utils.sshcontrol import SSHControl | 9 | from oeqa.utils.sshcontrol import SSHControl |
10 | 10 | ||
11 | def parse_results(filename): | 11 | def parse_results(filename): |
@@ -38,15 +38,9 @@ def parse_results(filename): | |||
38 | @OETestTag("toolchain-user") | 38 | @OETestTag("toolchain-user") |
39 | @OETestTag("runqemu") | 39 | @OETestTag("runqemu") |
40 | class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): | 40 | class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): |
41 | def test_rust(self, *args, **kwargs): | ||
42 | # Disable Rust Oe-selftest | ||
43 | #self.skipTest("The Rust Oe-selftest is disabled.") | ||
44 | |||
45 | # Skip mips32 target since it is unstable with rust tests | ||
46 | machine = get_bb_var('MACHINE') | ||
47 | if machine == "qemumips": | ||
48 | self.skipTest("The mips32 target is skipped for Rust Oe-selftest.") | ||
49 | 41 | ||
42 | @skipIfArch(['mips', 'mips64']) | ||
43 | def test_rust(self, *args, **kwargs): | ||
50 | # build remote-test-server before image build | 44 | # build remote-test-server before image build |
51 | recipe = "rust" | 45 | recipe = "rust" |
52 | start_time = time.time() | 46 | start_time = time.time() |
@@ -66,132 +60,43 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): | |||
66 | # bless: First runs rustfmt to format the codebase, | 60 | # bless: First runs rustfmt to format the codebase, |
67 | # then runs tidy checks. | 61 | # then runs tidy checks. |
68 | exclude_list = [ | 62 | exclude_list = [ |
69 | 'compiler/rustc', | 63 | 'src/bootstrap', |
70 | 'compiler/rustc_interface/src/tests.rs', | ||
71 | 'library/panic_abort', | ||
72 | 'library/panic_unwind', | ||
73 | 'library/test/src/stats/tests.rs', | ||
74 | 'src/bootstrap/builder/tests.rs', | ||
75 | 'src/doc/rustc', | 64 | 'src/doc/rustc', |
76 | 'src/doc/rustdoc', | 65 | 'src/doc/rustdoc', |
77 | 'src/doc/unstable-book', | 66 | 'src/doc/unstable-book', |
67 | 'src/etc/test-float-parse', | ||
78 | 'src/librustdoc', | 68 | 'src/librustdoc', |
79 | 'src/rustdoc-json-types', | 69 | 'src/rustdoc-json-types', |
80 | 'src/tools/compiletest/src/common.rs', | 70 | 'src/tools/jsondoclint', |
81 | 'src/tools/lint-docs', | 71 | 'src/tools/lint-docs', |
72 | 'src/tools/replace-version-placeholder', | ||
82 | 'src/tools/rust-analyzer', | 73 | 'src/tools/rust-analyzer', |
83 | 'src/tools/rustdoc-themes', | 74 | 'src/tools/rustdoc-themes', |
84 | 'src/tools/tidy', | 75 | 'src/tools/rust-installer', |
76 | 'src/tools/suggest-tests', | ||
85 | 'tests/assembly/asm/aarch64-outline-atomics.rs', | 77 | 'tests/assembly/asm/aarch64-outline-atomics.rs', |
86 | 'tests/codegen/abi-main-signature-32bit-c-int.rs', | 78 | 'tests/codegen/issues/issue-122805.rs', |
87 | 'tests/codegen/abi-repr-ext.rs', | ||
88 | 'tests/codegen/abi-x86-interrupt.rs', | ||
89 | 'tests/codegen/branch-protection.rs', | ||
90 | 'tests/codegen/catch-unwind.rs', | ||
91 | 'tests/codegen/cf-protection.rs', | ||
92 | 'tests/codegen/enum-bounds-check-derived-idx.rs', | ||
93 | 'tests/codegen/force-unwind-tables.rs', | ||
94 | 'tests/codegen/intrinsic-no-unnamed-attr.rs', | ||
95 | 'tests/codegen/issues/issue-103840.rs', | ||
96 | 'tests/codegen/issues/issue-47278.rs', | ||
97 | 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs', | ||
98 | 'tests/codegen/lifetime_start_end.rs', | ||
99 | 'tests/codegen/local-generics-in-exe-internalized.rs', | ||
100 | 'tests/codegen/match-unoptimized.rs', | ||
101 | 'tests/codegen/noalias-rwlockreadguard.rs', | ||
102 | 'tests/codegen/non-terminate/nonempty-infinite-loop.rs', | ||
103 | 'tests/codegen/noreturn-uninhabited.rs', | ||
104 | 'tests/codegen/repr-transparent-aggregates-3.rs', | ||
105 | 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs', | ||
106 | 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs', | ||
107 | 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs', | ||
108 | 'tests/codegen/sse42-implies-crc32.rs', | ||
109 | 'tests/codegen/thread-local.rs', | 79 | 'tests/codegen/thread-local.rs', |
110 | 'tests/codegen/uninit-consts.rs', | 80 | 'tests/mir-opt/', |
111 | 'tests/pretty/raw-str-nonexpr.rs', | ||
112 | 'tests/run-make', | 81 | 'tests/run-make', |
113 | 'tests/run-make-fulldeps', | 82 | 'tests/run-make-fulldeps', |
114 | 'tests/rustdoc', | 83 | 'tests/rustdoc', |
115 | 'tests/rustdoc-json', | 84 | 'tests/rustdoc-json', |
116 | 'tests/rustdoc-js-std', | 85 | 'tests/rustdoc-js-std', |
117 | 'tests/rustdoc-ui/cfg-test.rs', | ||
118 | 'tests/rustdoc-ui/check-cfg-test.rs', | ||
119 | 'tests/rustdoc-ui/display-output.rs', | ||
120 | 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs', | ||
121 | 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs', | ||
122 | 'tests/rustdoc-ui/doc-test-doctest-feature.rs', | ||
123 | 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs', | ||
124 | 'tests/rustdoc-ui/doctest-output.rs', | ||
125 | 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs', | ||
126 | 'tests/rustdoc-ui/failed-doctest-compile-fail.rs', | ||
127 | 'tests/rustdoc-ui/issue-80992.rs', | ||
128 | 'tests/rustdoc-ui/issue-91134.rs', | ||
129 | 'tests/rustdoc-ui/nocapture-fail.rs', | ||
130 | 'tests/rustdoc-ui/nocapture.rs', | ||
131 | 'tests/rustdoc-ui/no-run-flag.rs', | ||
132 | 'tests/rustdoc-ui/run-directory.rs', | ||
133 | 'tests/rustdoc-ui/test-no_std.rs', | ||
134 | 'tests/rustdoc-ui/test-type.rs', | ||
135 | 'tests/rustdoc/unit-return.rs', | ||
136 | 'tests/ui/abi/stack-probes-lto.rs', | 86 | 'tests/ui/abi/stack-probes-lto.rs', |
137 | 'tests/ui/abi/stack-probes.rs', | 87 | 'tests/ui/abi/stack-probes.rs', |
138 | 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs', | 88 | 'tests/ui/codegen/mismatched-data-layouts.rs', |
139 | 'tests/ui/asm/x86_64/sym.rs', | 89 | 'tests/codegen/rust-abi-arch-specific-adjustment.rs', |
140 | 'tests/ui/associated-type-bounds/fn-apit.rs', | ||
141 | 'tests/ui/associated-type-bounds/fn-dyn-apit.rs', | ||
142 | 'tests/ui/associated-type-bounds/fn-wrap-apit.rs', | ||
143 | 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs', | 90 | 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs', |
144 | 'tests/ui/drop/dynamic-drop.rs', | 91 | 'tests/ui/feature-gates/version_check.rs', |
145 | 'tests/ui/empty_global_asm.rs', | ||
146 | 'tests/ui/functions-closures/fn-help-with-err.rs', | ||
147 | 'tests/ui/linkage-attr/issue-10755.rs', | ||
148 | 'tests/ui/macros/restricted-shadowing-legacy.rs', | ||
149 | 'tests/ui/process/nofile-limit.rs', | ||
150 | 'tests/ui/process/process-panic-after-fork.rs', | ||
151 | 'tests/ui/process/process-sigpipe.rs', | ||
152 | 'tests/ui/simd/target-feature-mixup.rs', | ||
153 | 'tests/ui/structs-enums/multiple-reprs.rs', | ||
154 | 'src/tools/jsondoclint', | ||
155 | 'src/tools/replace-version-placeholder', | ||
156 | 'tests/codegen/abi-efiapi.rs', | ||
157 | 'tests/codegen/abi-sysv64.rs', | ||
158 | 'tests/codegen/align-byval.rs', | ||
159 | 'tests/codegen/align-fn.rs', | ||
160 | 'tests/codegen/asm-powerpc-clobbers.rs', | ||
161 | 'tests/codegen/async-fn-debug-awaitee-field.rs', | ||
162 | 'tests/codegen/binary-search-index-no-bound-check.rs', | ||
163 | 'tests/codegen/call-metadata.rs', | ||
164 | 'tests/codegen/debug-column.rs', | ||
165 | 'tests/codegen/debug-limited.rs', | ||
166 | 'tests/codegen/debuginfo-generic-closure-env-names.rs', | ||
167 | 'tests/codegen/drop.rs', | ||
168 | 'tests/codegen/dst-vtable-align-nonzero.rs', | ||
169 | 'tests/codegen/enable-lto-unit-splitting.rs', | ||
170 | 'tests/codegen/enum/enum-u128.rs', | ||
171 | 'tests/codegen/fn-impl-trait-self.rs', | ||
172 | 'tests/codegen/inherit_overflow.rs', | ||
173 | 'tests/codegen/inline-function-args-debug-info.rs', | ||
174 | 'tests/codegen/intrinsics/mask.rs', | ||
175 | 'tests/codegen/intrinsics/transmute-niched.rs', | ||
176 | 'tests/codegen/issues/issue-73258.rs', | ||
177 | 'tests/codegen/issues/issue-75546.rs', | ||
178 | 'tests/codegen/issues/issue-77812.rs', | ||
179 | 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs', | ||
180 | 'tests/codegen/llvm-ident.rs', | ||
181 | 'tests/codegen/mainsubprogram.rs', | ||
182 | 'tests/codegen/move-operands.rs', | ||
183 | 'tests/codegen/repr/transparent-mips64.rs', | ||
184 | 'tests/mir-opt/', | ||
185 | 'tests/rustdoc-json', | ||
186 | 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs', | ||
187 | 'tests/rustdoc-ui/no-run-flag.rs', | ||
188 | 'tests/ui-fulldeps/', | 92 | 'tests/ui-fulldeps/', |
189 | 'tests/ui/numbers-arithmetic/u128.rs' | 93 | 'tests/ui/process/nofile-limit.rs', |
94 | 'tidyselftest' | ||
190 | ] | 95 | ] |
191 | 96 | ||
192 | exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list]) | 97 | exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list]) |
193 | # Add exclude_fail_tests with other test arguments | 98 | # Add exclude_fail_tests with other test arguments |
194 | testargs = exclude_fail_tests + " --doc --no-fail-fast --bless" | 99 | testargs = exclude_fail_tests + " --no-fail-fast --bless" |
195 | 100 | ||
196 | # wrap the execution with a qemu instance. | 101 | # wrap the execution with a qemu instance. |
197 | # Tests are run with 512 tasks in parallel to execute all tests very quickly | 102 | # Tests are run with 512 tasks in parallel to execute all tests very quickly |
@@ -199,7 +104,7 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): | |||
199 | # Copy remote-test-server to image through scp | 104 | # Copy remote-test-server to image through scp |
200 | host_sys = get_bb_var("RUST_BUILD_SYS", "rust") | 105 | host_sys = get_bb_var("RUST_BUILD_SYS", "rust") |
201 | ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root") | 106 | ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root") |
202 | ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/") | 107 | ssh.copy_to(builddir + "/build/" + host_sys + "/stage2-tools-bin/remote-test-server","~/") |
203 | # Execute remote-test-server on image through background ssh | 108 | # Execute remote-test-server on image through background ssh |
204 | command = '~/remote-test-server --bind 0.0.0.0:12345 -v' | 109 | command = '~/remote-test-server --bind 0.0.0.0:12345 -v' |
205 | sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | 110 | sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
@@ -210,9 +115,8 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): | |||
210 | tmpdir = get_bb_var("TMPDIR", "rust") | 115 | tmpdir = get_bb_var("TMPDIR", "rust") |
211 | 116 | ||
212 | # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. | 117 | # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. |
213 | cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath | 118 | cmd = "export TARGET_VENDOR=\"-poky\";" |
214 | cmd = cmd + " export TARGET_VENDOR=\"-poky\";" | 119 | cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir) |
215 | cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir) | ||
216 | cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath | 120 | cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath |
217 | # Trigger testing. | 121 | # Trigger testing. |
218 | cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip | 122 | cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip |
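For orientation, a minimal sketch of how the exclude list and the export chain above compose into the final shell command. The paths, the device address and the trailing harness invocation are illustrative, not the literal values the selftest computes:

# Sketch only: compose "--exclude" arguments and environment exports.
exclude_list = ['src/bootstrap', 'tests/run-make', 'tidyselftest']
exclude_fail_tests = " ".join(" --exclude " + item for item in exclude_list)
testargs = exclude_fail_tests + " --no-fail-fast --bless"

cmd = 'export TARGET_VENDOR="-poky";'
cmd += ' export RUST_TARGET_PATH=/path/to/rust-targets;'   # illustrative path
cmd += ' export TEST_DEVICE_ADDR="192.168.7.2:12345";'     # qemu guest ip:port
# the selftest appends the actual test harness invocation (carrying
# testargs) to cmd and runs the whole string in one shell
print(cmd, testargs)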
diff --git a/meta/lib/oeqa/selftest/cases/sdk.py b/meta/lib/oeqa/selftest/cases/sdk.py new file mode 100644 index 0000000000..3971365029 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/sdk.py | |||
@@ -0,0 +1,39 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import os.path | ||
8 | |||
9 | from oeqa.selftest.case import OESelftestTestCase | ||
10 | from oeqa.utils.commands import bitbake, get_bb_vars | ||
11 | |||
12 | class SDKTests(OESelftestTestCase): | ||
13 | |||
14 | def load_manifest(self, filename): | ||
15 | manifest = {} | ||
16 | with open(filename) as f: | ||
17 | for line in f: | ||
18 | name, arch, version = line.split(maxsplit=3) | ||
19 | manifest[name] = (version, arch) | ||
20 | return manifest | ||
21 | |||
22 | def test_sdk_manifests(self): | ||
23 | image = "core-image-minimal" | ||
24 | |||
25 | self.write_config(""" | ||
26 | TOOLCHAIN_HOST_TASK:append = " nativesdk-selftest-hello" | ||
27 | IMAGE_INSTALL:append = " selftest-hello" | ||
28 | """) | ||
29 | |||
30 | bitbake(f"{image} -c populate_sdk") | ||
31 | vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'], image) | ||
32 | |||
33 | path = os.path.join(vars["SDK_DEPLOY"], vars["TOOLCHAIN_OUTPUTNAME"] + ".host.manifest") | ||
34 | self.assertNotEqual(os.path.getsize(path), 0, msg="Host manifest is empty") | ||
35 | self.assertIn("nativesdk-selftest-hello", self.load_manifest(path)) | ||
36 | |||
37 | path = os.path.join(vars["SDK_DEPLOY"], vars["TOOLCHAIN_OUTPUTNAME"] + ".target.manifest") | ||
38 | self.assertNotEqual(os.path.getsize(path), 0, msg="Target manifest is empty") | ||
39 | self.assertIn("selftest-hello", self.load_manifest(path)) | ||
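load_manifest() above assumes one package per line with whitespace-separated name, architecture and version fields. A self-contained sketch of that format and the resulting dict; the sample entries are illustrative:

# Sketch of the manifest format load_manifest() parses (entries illustrative).
sample = """\
nativesdk-selftest-hello x86_64-nativesdk 1.0-r0
selftest-hello core2-64 1.0-r0
"""

manifest = {}
for line in sample.splitlines():
    name, arch, version = line.split(maxsplit=3)
    manifest[name] = (version, arch)

assert "nativesdk-selftest-hello" in manifest   # mirrors the test's assertIn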
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py index 18cce0ba25..4df45ba032 100644 --- a/meta/lib/oeqa/selftest/cases/signing.py +++ b/meta/lib/oeqa/selftest/cases/signing.py | |||
@@ -83,6 +83,8 @@ class Signing(OESelftestTestCase): | |||
83 | feature += 'RPM_GPG_PASSPHRASE = "test123"\n' | 83 | feature += 'RPM_GPG_PASSPHRASE = "test123"\n' |
84 | feature += 'RPM_GPG_NAME = "testuser"\n' | 84 | feature += 'RPM_GPG_NAME = "testuser"\n' |
85 | feature += 'GPG_PATH = "%s"\n' % self.gpg_dir | 85 | feature += 'GPG_PATH = "%s"\n' % self.gpg_dir |
86 | feature += 'PACKAGECONFIG:append:pn-rpm-native = " sequoia"\n' | ||
87 | feature += 'PACKAGECONFIG:append:pn-rpm = " sequoia"\n' | ||
86 | 88 | ||
87 | self.write_config(feature) | 89 | self.write_config(feature) |
88 | 90 | ||
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py index 05fc4e390b..8cd4e83ca2 100644 --- a/meta/lib/oeqa/selftest/cases/spdx.py +++ b/meta/lib/oeqa/selftest/cases/spdx.py | |||
@@ -6,29 +6,39 @@ | |||
6 | 6 | ||
7 | import json | 7 | import json |
8 | import os | 8 | import os |
9 | import textwrap | ||
10 | import hashlib | ||
11 | from pathlib import Path | ||
9 | from oeqa.selftest.case import OESelftestTestCase | 12 | from oeqa.selftest.case import OESelftestTestCase |
10 | from oeqa.utils.commands import bitbake, get_bb_var, runCmd | 13 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd |
14 | import oe.spdx30 | ||
11 | 15 | ||
12 | class SPDXCheck(OESelftestTestCase): | ||
13 | 16 | ||
17 | class SPDX22Check(OESelftestTestCase): | ||
14 | @classmethod | 18 | @classmethod |
15 | def setUpClass(cls): | 19 | def setUpClass(cls): |
16 | super(SPDXCheck, cls).setUpClass() | 20 | super().setUpClass() |
17 | bitbake("python3-spdx-tools-native") | 21 | bitbake("python3-spdx-tools-native") |
18 | bitbake("-c addto_recipe_sysroot python3-spdx-tools-native") | 22 | bitbake("-c addto_recipe_sysroot python3-spdx-tools-native") |
19 | 23 | ||
20 | def check_recipe_spdx(self, high_level_dir, spdx_file, target_name): | 24 | def check_recipe_spdx(self, high_level_dir, spdx_file, target_name): |
21 | config = """ | 25 | config = textwrap.dedent( |
22 | INHERIT += "create-spdx" | 26 | """\ |
23 | """ | 27 | INHERIT:remove = "create-spdx" |
28 | INHERIT += "create-spdx-2.2" | ||
29 | """ | ||
30 | ) | ||
24 | self.write_config(config) | 31 | self.write_config(config) |
25 | 32 | ||
26 | deploy_dir = get_bb_var("DEPLOY_DIR") | 33 | deploy_dir = get_bb_var("DEPLOY_DIR") |
27 | machine_var = get_bb_var("MACHINE") | 34 | arch_dir = get_bb_var("PACKAGE_ARCH", target_name) |
35 | spdx_version = get_bb_var("SPDX_VERSION") | ||
28 | # qemux86-64 creates the directory qemux86_64 | 36 | # qemux86-64 creates the directory qemux86_64 |
29 | machine_dir = machine_var.replace("-", "_") | 37 | #arch_dir = arch_var.replace("-", "_") |
30 | 38 | ||
31 | full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file) | 39 | full_file_path = os.path.join( |
40 | deploy_dir, "spdx", spdx_version, arch_dir, high_level_dir, spdx_file | ||
41 | ) | ||
32 | 42 | ||
33 | try: | 43 | try: |
34 | os.remove(full_file_path) | 44 | os.remove(full_file_path) |
@@ -43,8 +53,13 @@ INHERIT += "create-spdx" | |||
43 | self.assertNotEqual(report, None) | 53 | self.assertNotEqual(report, None) |
44 | self.assertNotEqual(report["SPDXID"], None) | 54 | self.assertNotEqual(report["SPDXID"], None) |
45 | 55 | ||
46 | python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3') | 56 | python = os.path.join( |
47 | validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools') | 57 | get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"), |
58 | "nativepython3", | ||
59 | ) | ||
60 | validator = os.path.join( | ||
61 | get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"), "pyspdxtools" | ||
62 | ) | ||
48 | result = runCmd("{} {} -i {}".format(python, validator, filename)) | 63 | result = runCmd("{} {} -i {}".format(python, validator, filename)) |
49 | 64 | ||
50 | self.assertExists(full_file_path) | 65 | self.assertExists(full_file_path) |
@@ -52,3 +67,222 @@ INHERIT += "create-spdx" | |||
52 | 67 | ||
53 | def test_spdx_base_files(self): | 68 | def test_spdx_base_files(self): |
54 | self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files") | 69 | self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files") |
70 | |||
71 | def test_spdx_tar(self): | ||
72 | self.check_recipe_spdx("packages", "tar.spdx.json", "tar") | ||
73 | |||
74 | |||
75 | class SPDX3CheckBase(object): | ||
76 | """ | ||
77 | Base class for checking SPDX 3 based tests | ||
78 | """ | ||
79 | |||
80 | def check_spdx_file(self, filename): | ||
81 | self.assertExists(filename) | ||
82 | |||
83 | # Read the file | ||
84 | objset = oe.spdx30.SHACLObjectSet() | ||
85 | with open(filename, "r") as f: | ||
86 | d = oe.spdx30.JSONLDDeserializer() | ||
87 | d.read(f, objset) | ||
88 | |||
89 | return objset | ||
90 | |||
91 | def check_recipe_spdx(self, target_name, spdx_path, *, task=None, extraconf=""): | ||
92 | config = ( | ||
93 | textwrap.dedent( | ||
94 | f"""\ | ||
95 | INHERIT:remove = "create-spdx" | ||
96 | INHERIT += "{self.SPDX_CLASS}" | ||
97 | """ | ||
98 | ) | ||
99 | + textwrap.dedent(extraconf) | ||
100 | ) | ||
101 | |||
102 | self.write_config(config) | ||
103 | |||
104 | if task: | ||
105 | bitbake(f"-c {task} {target_name}") | ||
106 | else: | ||
107 | bitbake(target_name) | ||
108 | |||
109 | filename = spdx_path.format( | ||
110 | **get_bb_vars( | ||
111 | [ | ||
112 | "DEPLOY_DIR_IMAGE", | ||
113 | "DEPLOY_DIR_SPDX", | ||
114 | "MACHINE", | ||
115 | "MACHINE_ARCH", | ||
116 | "SDKMACHINE", | ||
117 | "SDK_DEPLOY", | ||
118 | "SPDX_VERSION", | ||
119 | "SSTATE_PKGARCH", | ||
120 | "TOOLCHAIN_OUTPUTNAME", | ||
121 | ], | ||
122 | target_name, | ||
123 | ) | ||
124 | ) | ||
125 | |||
126 | return self.check_spdx_file(filename) | ||
127 | |||
128 | def check_objset_missing_ids(self, objset): | ||
129 | for o in objset.foreach_type(oe.spdx30.SpdxDocument): | ||
130 | doc = o | ||
131 | break | ||
132 | else: | ||
133 | self.assertTrue(False, "Unable to find SpdxDocument") | ||
134 | |||
135 | missing_ids = objset.missing_ids - set(i.externalSpdxId for i in doc.import_) | ||
136 | if missing_ids: | ||
137 | self.assertTrue( | ||
138 | False, | ||
139 | "The following SPDXIDs are unresolved:\n " + "\n ".join(missing_ids), | ||
140 | ) | ||
141 | |||
142 | |||
143 | class SPDX30Check(SPDX3CheckBase, OESelftestTestCase): | ||
144 | SPDX_CLASS = "create-spdx-3.0" | ||
145 | |||
146 | def test_base_files(self): | ||
147 | self.check_recipe_spdx( | ||
148 | "base-files", | ||
149 | "{DEPLOY_DIR_SPDX}/{MACHINE_ARCH}/packages/package-base-files.spdx.json", | ||
150 | ) | ||
151 | |||
152 | def test_gcc_include_source(self): | ||
153 | objset = self.check_recipe_spdx( | ||
154 | "gcc", | ||
155 | "{DEPLOY_DIR_SPDX}/{SSTATE_PKGARCH}/recipes/recipe-gcc.spdx.json", | ||
156 | extraconf="""\ | ||
157 | SPDX_INCLUDE_SOURCES = "1" | ||
158 | """, | ||
159 | ) | ||
160 | |||
161 | gcc_pv = get_bb_var("PV", "gcc") | ||
162 | filename = f"gcc-{gcc_pv}/README" | ||
163 | found = False | ||
164 | for software_file in objset.foreach_type(oe.spdx30.software_File): | ||
165 | if software_file.name == filename: | ||
166 | found = True | ||
167 | self.logger.info( | ||
168 | f"The spdxId of {filename} in recipe-gcc.spdx.json is {software_file.spdxId}" | ||
169 | ) | ||
170 | break | ||
171 | |||
172 | self.assertTrue( | ||
173 | found, f"Not found source file {filename} in recipe-gcc.spdx.json\n" | ||
174 | ) | ||
175 | |||
176 | def test_core_image_minimal(self): | ||
177 | objset = self.check_recipe_spdx( | ||
178 | "core-image-minimal", | ||
179 | "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json", | ||
180 | ) | ||
181 | |||
182 | # Document should be fully linked | ||
183 | self.check_objset_missing_ids(objset) | ||
184 | |||
185 | def test_core_image_minimal_sdk(self): | ||
186 | objset = self.check_recipe_spdx( | ||
187 | "core-image-minimal", | ||
188 | "{SDK_DEPLOY}/{TOOLCHAIN_OUTPUTNAME}.spdx.json", | ||
189 | task="populate_sdk", | ||
190 | ) | ||
191 | |||
192 | # Document should be fully linked | ||
193 | self.check_objset_missing_ids(objset) | ||
194 | |||
195 | def test_baremetal_helloworld(self): | ||
196 | objset = self.check_recipe_spdx( | ||
197 | "baremetal-helloworld", | ||
198 | "{DEPLOY_DIR_IMAGE}/baremetal-helloworld-image-{MACHINE}.spdx.json", | ||
199 | extraconf="""\ | ||
200 | TCLIBC = "baremetal" | ||
201 | """, | ||
202 | ) | ||
203 | |||
204 | # Document should be fully linked | ||
205 | self.check_objset_missing_ids(objset) | ||
206 | |||
207 | def test_extra_opts(self): | ||
208 | HOST_SPDXID = "http://foo.bar/spdx/bar2" | ||
209 | |||
210 | EXTRACONF = textwrap.dedent( | ||
211 | f"""\ | ||
212 | SPDX_INVOKED_BY_name = "CI Tool" | ||
213 | SPDX_INVOKED_BY_type = "software" | ||
214 | |||
215 | SPDX_ON_BEHALF_OF_name = "John Doe" | ||
216 | SPDX_ON_BEHALF_OF_type = "person" | ||
217 | SPDX_ON_BEHALF_OF_id_email = "John.Doe@noreply.com" | ||
218 | |||
219 | SPDX_PACKAGE_SUPPLIER_name = "ACME Embedded Widgets" | ||
220 | SPDX_PACKAGE_SUPPLIER_type = "organization" | ||
221 | |||
222 | SPDX_AUTHORS += "authorA" | ||
223 | SPDX_AUTHORS_authorA_ref = "SPDX_ON_BEHALF_OF" | ||
224 | |||
225 | SPDX_BUILD_HOST = "host" | ||
226 | |||
227 | SPDX_IMPORTS += "host" | ||
228 | SPDX_IMPORTS_host_spdxid = "{HOST_SPDXID}" | ||
229 | |||
230 | SPDX_INCLUDE_BUILD_VARIABLES = "1" | ||
231 | SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "1" | ||
232 | SPDX_INCLUDE_TIMESTAMPS = "1" | ||
233 | |||
234 | SPDX_PRETTY = "1" | ||
235 | """ | ||
236 | ) | ||
237 | extraconf_hash = hashlib.sha1(EXTRACONF.encode("utf-8")).hexdigest() | ||
238 | |||
239 | objset = self.check_recipe_spdx( | ||
240 | "core-image-minimal", | ||
241 | "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json", | ||
242 | # Many SPDX variables do not trigger a rebuild, since they are | ||
243 | # intended to record information at the time of the build. As such, | ||
244 | # the extra configuration alone may not trigger a rebuild, and even | ||
245 | # if it does, the task hash won't necessarily be unique. In order | ||
246 | # to make sure rebuilds happen, but still allow these test objects | ||
247 | # to be pulled from sstate (i.e. remain reproducible), change the | ||
248 | # namespace prefix to include the hash of the extra configuration | ||
249 | extraconf=textwrap.dedent( | ||
250 | f"""\ | ||
251 | SPDX_NAMESPACE_PREFIX = "http://spdx.org/spdxdocs/{extraconf_hash}" | ||
252 | """ | ||
253 | ) | ||
254 | + EXTRACONF, | ||
255 | ) | ||
256 | |||
257 | # Document should be fully linked | ||
258 | self.check_objset_missing_ids(objset) | ||
259 | |||
260 | for o in objset.foreach_type(oe.spdx30.SoftwareAgent): | ||
261 | if o.name == "CI Tool": | ||
262 | break | ||
263 | else: | ||
264 | self.assertTrue(False, "Unable to find software tool") | ||
265 | |||
266 | for o in objset.foreach_type(oe.spdx30.Person): | ||
267 | if o.name == "John Doe": | ||
268 | break | ||
269 | else: | ||
270 | self.assertTrue(False, "Unable to find person") | ||
271 | |||
272 | for o in objset.foreach_type(oe.spdx30.Organization): | ||
273 | if o.name == "ACME Embedded Widgets": | ||
274 | break | ||
275 | else: | ||
276 | self.assertTrue(False, "Unable to find organization") | ||
277 | |||
278 | for o in objset.foreach_type(oe.spdx30.SpdxDocument): | ||
279 | doc = o | ||
280 | break | ||
281 | else: | ||
282 | self.assertTrue(False, "Unable to find SpdxDocument") | ||
283 | |||
284 | for i in doc.import_: | ||
285 | if i.externalSpdxId == HOST_SPDXID: | ||
286 | break | ||
287 | else: | ||
288 | self.assertTrue(False, "Unable to find imported Host SpdxID") | ||
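A standalone sketch of the namespace-prefix trick that test_extra_opts relies on: hashing the extra configuration yields a prefix that changes whenever the configuration changes (forcing a rebuild) but stays stable for identical input (keeping the objects sstate-reproducible). The configuration fragment is illustrative:

import hashlib
import textwrap

EXTRACONF = 'SPDX_PRETTY = "1"\n'   # illustrative fragment
extraconf_hash = hashlib.sha1(EXTRACONF.encode("utf-8")).hexdigest()
extraconf = textwrap.dedent(f"""\
    SPDX_NAMESPACE_PREFIX = "http://spdx.org/spdxdocs/{extraconf_hash}"
""") + EXTRACONF
print(extraconf)   # same input -> same prefix; any edit -> new prefix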
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py index 86d6cd7464..08f94b168a 100644 --- a/meta/lib/oeqa/selftest/cases/sstatetests.py +++ b/meta/lib/oeqa/selftest/cases/sstatetests.py | |||
@@ -27,17 +27,15 @@ class SStateBase(OESelftestTestCase): | |||
27 | def setUpLocal(self): | 27 | def setUpLocal(self): |
28 | super(SStateBase, self).setUpLocal() | 28 | super(SStateBase, self).setUpLocal() |
29 | self.temp_sstate_location = None | 29 | self.temp_sstate_location = None |
30 | needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH', | 30 | needed_vars = ['SSTATE_DIR', 'TCLIBC', 'TUNE_ARCH', |
31 | 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS'] | 31 | 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS'] |
32 | bb_vars = get_bb_vars(needed_vars) | 32 | bb_vars = get_bb_vars(needed_vars) |
33 | self.sstate_path = bb_vars['SSTATE_DIR'] | 33 | self.sstate_path = bb_vars['SSTATE_DIR'] |
34 | self.hostdistro = bb_vars['NATIVELSBSTRING'] | ||
35 | self.tclibc = bb_vars['TCLIBC'] | 34 | self.tclibc = bb_vars['TCLIBC'] |
36 | self.tune_arch = bb_vars['TUNE_ARCH'] | 35 | self.tune_arch = bb_vars['TUNE_ARCH'] |
37 | self.topdir = bb_vars['TOPDIR'] | 36 | self.topdir = bb_vars['TOPDIR'] |
38 | self.target_vendor = bb_vars['TARGET_VENDOR'] | 37 | self.target_vendor = bb_vars['TARGET_VENDOR'] |
39 | self.target_os = bb_vars['TARGET_OS'] | 38 | self.target_os = bb_vars['TARGET_OS'] |
40 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) | ||
41 | 39 | ||
42 | def track_for_cleanup(self, path): | 40 | def track_for_cleanup(self, path): |
43 | if not keep_temp_files: | 41 | if not keep_temp_files: |
@@ -52,10 +50,7 @@ class SStateBase(OESelftestTestCase): | |||
52 | config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path | 50 | config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path |
53 | self.append_config(config_temp_sstate) | 51 | self.append_config(config_temp_sstate) |
54 | self.track_for_cleanup(temp_sstate_path) | 52 | self.track_for_cleanup(temp_sstate_path) |
55 | bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING']) | 53 | self.sstate_path = get_bb_var('SSTATE_DIR') |
56 | self.sstate_path = bb_vars['SSTATE_DIR'] | ||
57 | self.hostdistro = bb_vars['NATIVELSBSTRING'] | ||
58 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) | ||
59 | 54 | ||
60 | if add_local_mirrors: | 55 | if add_local_mirrors: |
61 | config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""' | 56 | config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""' |
@@ -65,8 +60,16 @@ class SStateBase(OESelftestTestCase): | |||
65 | config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror | 60 | config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror |
66 | self.append_config(config_sstate_mirror) | 61 | self.append_config(config_sstate_mirror) |
67 | 62 | ||
63 | def set_hostdistro(self): | ||
64 | # This needs to be read after a BuildStarted event in case it gets changed by event | ||
65 | # handling in uninative.bbclass | ||
66 | self.hostdistro = get_bb_var('NATIVELSBSTRING') | ||
67 | self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) | ||
68 | |||
68 | # Returns a list containing sstate files | 69 | # Returns a list containing sstate files |
69 | def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True): | 70 | def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True): |
71 | self.set_hostdistro() | ||
72 | |||
70 | result = [] | 73 | result = [] |
71 | for root, dirs, files in os.walk(self.sstate_path): | 74 | for root, dirs, files in os.walk(self.sstate_path): |
72 | if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root): | 75 | if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root): |
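The distro-specific branch above encodes the on-disk sstate layout: host-distro-specific objects carry an extra NATIVELSBSTRING path level between SSTATE_DIR and the two-character hash directories. A small sketch with illustrative directory names:

import re

sstate_path = "/build/sstate-cache"   # illustrative SSTATE_DIR
hostdistro = "universal"              # illustrative NATIVELSBSTRING
root = sstate_path + "/" + hostdistro + "/3f/9a"
assert re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (sstate_path, hostdistro), root)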
@@ -80,55 +83,43 @@ class SStateBase(OESelftestTestCase): | |||
80 | return result | 83 | return result |
81 | 84 | ||
82 | # Test sstate files creation and their location and directory perms | 85 | # Test sstate files creation and their location and directory perms |
83 | def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True): | 86 | def run_test_sstate_creation(self, targets, hostdistro_specific): |
84 | self.config_sstate(temp_sstate_location, [self.sstate_path]) | 87 | self.config_sstate(True, [self.sstate_path]) |
88 | |||
89 | bitbake(['-cclean'] + targets) | ||
85 | 90 | ||
86 | if self.temp_sstate_location: | ||
87 | bitbake(['-cclean'] + targets) | ||
88 | else: | ||
89 | bitbake(['-ccleansstate'] + targets) | ||
90 | |||
91 | # We need to test that the env umask does not affect sstate directory creation | ||
92 | # So, first, we'll get the current umask and set it to something we know incorrect | ||
93 | # See: sstate_task_postfunc for correct umask of os.umask(0o002) | ||
94 | import os | ||
95 | def current_umask(): | ||
96 | current_umask = os.umask(0) | ||
97 | os.umask(current_umask) | ||
98 | return current_umask | ||
99 | |||
100 | orig_umask = current_umask() | ||
101 | # Set it to a umask we know will be 'wrong' | 91 | # Set it to a umask we know will be 'wrong' |
102 | os.umask(0o022) | 92 | with bb.utils.umask(0o022): |
93 | bitbake(targets) | ||
103 | 94 | ||
104 | bitbake(targets) | 95 | # Distro specific files |
105 | file_tracker = [] | 96 | distro_specific_files = self.search_sstate('|'.join(map(str, targets)), True, False) |
106 | results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific) | ||
107 | if distro_nonspecific: | ||
108 | for r in results: | ||
109 | if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")): | ||
110 | continue | ||
111 | file_tracker.append(r) | ||
112 | else: | ||
113 | file_tracker = results | ||
114 | 97 | ||
115 | if should_pass: | 98 | # Distro non-specific |
116 | self.assertTrue(file_tracker , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets))) | 99 | distro_non_specific_files = [] |
100 | results = self.search_sstate('|'.join(map(str, targets)), False, True) | ||
101 | for r in results: | ||
102 | if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")): | ||
103 | continue | ||
104 | distro_non_specific_files.append(r) | ||
105 | |||
106 | if hostdistro_specific: | ||
107 | self.assertTrue(distro_specific_files , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets))) | ||
108 | self.assertFalse(distro_non_specific_files, msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(distro_non_specific_files))) | ||
117 | else: | 109 | else: |
118 | self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker))) | 110 | self.assertTrue(distro_non_specific_files , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets))) |
111 | self.assertFalse(distro_specific_files, msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(distro_specific_files))) | ||
119 | 112 | ||
120 | # Now we'll walk the tree to check the mode and see if things are incorrect. | 113 | # Now we'll walk the tree to check the mode and see if things are incorrect. |
121 | badperms = [] | 114 | badperms = [] |
122 | for root, dirs, files in os.walk(self.sstate_path): | 115 | for root, dirs, files in os.walk(self.sstate_path): |
123 | for directory in dirs: | 116 | for directory in dirs: |
124 | if (os.stat(os.path.join(root, directory)).st_mode & 0o777) != 0o775: | 117 | mode = os.stat(os.path.join(root, directory)).st_mode & 0o777 |
125 | badperms.append(os.path.join(root, directory)) | 118 | if mode != 0o775: |
126 | 119 | badperms.append("%s: %s vs %s" % (os.path.join(root, directory), mode, 0o775)) | |
127 | # Return to original umask | ||
128 | os.umask(orig_umask) | ||
129 | 120 | ||
130 | if should_pass: | 121 | # Check badperms is empty |
131 | self.assertTrue(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms))) | 122 | self.assertFalse(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms))) |
132 | 123 | ||
133 | # Test the sstate files deletion part of the do_cleansstate task | 124 | # Test the sstate files deletion part of the do_cleansstate task |
134 | def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True): | 125 | def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True): |
@@ -153,6 +144,8 @@ class SStateBase(OESelftestTestCase): | |||
153 | 144 | ||
154 | bitbake(['-ccleansstate'] + targets) | 145 | bitbake(['-ccleansstate'] + targets) |
155 | 146 | ||
147 | self.set_hostdistro() | ||
148 | |||
156 | bitbake(targets) | 149 | bitbake(targets) |
157 | results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True) | 150 | results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True) |
158 | filtered_results = [] | 151 | filtered_results = [] |
@@ -251,17 +244,11 @@ class SStateTests(SStateBase): | |||
251 | bitbake("dbus-wait-test -c unpack") | 244 | bitbake("dbus-wait-test -c unpack") |
252 | 245 | ||
253 | class SStateCreation(SStateBase): | 246 | class SStateCreation(SStateBase): |
254 | def test_sstate_creation_distro_specific_pass(self): | 247 | def test_sstate_creation_distro_specific(self): |
255 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) | 248 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], hostdistro_specific=True) |
256 | |||
257 | def test_sstate_creation_distro_specific_fail(self): | ||
258 | self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False) | ||
259 | 249 | ||
260 | def test_sstate_creation_distro_nonspecific_pass(self): | 250 | def test_sstate_creation_distro_nonspecific(self): |
261 | self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) | 251 | self.run_test_sstate_creation(['linux-libc-headers'], hostdistro_specific=False) |
262 | |||
263 | def test_sstate_creation_distro_nonspecific_fail(self): | ||
264 | self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False) | ||
265 | 252 | ||
266 | class SStateCleanup(SStateBase): | 253 | class SStateCleanup(SStateBase): |
267 | def test_cleansstate_task_distro_specific_nonspecific(self): | 254 | def test_cleansstate_task_distro_specific_nonspecific(self): |
@@ -367,18 +354,11 @@ class SStateCacheManagement(SStateBase): | |||
367 | self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) | 354 | self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) |
368 | 355 | ||
369 | class SStateHashSameSigs(SStateBase): | 356 | class SStateHashSameSigs(SStateBase): |
370 | def test_sstate_32_64_same_hash(self): | 357 | def sstate_hashtest(self, sdkmachine): |
371 | """ | ||
372 | The sstate checksums for both native and target should not vary whether | ||
373 | they're built on a 32 or 64 bit system. Rather than requiring two different | ||
374 | build machines and running builds, override the variables calling uname() | ||
375 | manually and check using bitbake -S. | ||
376 | """ | ||
377 | 358 | ||
378 | self.write_config(""" | 359 | self.write_config(""" |
379 | MACHINE = "qemux86" | 360 | MACHINE = "qemux86" |
380 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash" | 361 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash" |
381 | TCLIBCAPPEND = "" | ||
382 | BUILD_ARCH = "x86_64" | 362 | BUILD_ARCH = "x86_64" |
383 | BUILD_OS = "linux" | 363 | BUILD_OS = "linux" |
384 | SDKMACHINE = "x86_64" | 364 | SDKMACHINE = "x86_64" |
@@ -390,13 +370,12 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
390 | self.write_config(""" | 370 | self.write_config(""" |
391 | MACHINE = "qemux86" | 371 | MACHINE = "qemux86" |
392 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" | 372 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" |
393 | TCLIBCAPPEND = "" | ||
394 | BUILD_ARCH = "i686" | 373 | BUILD_ARCH = "i686" |
395 | BUILD_OS = "linux" | 374 | BUILD_OS = "linux" |
396 | SDKMACHINE = "i686" | 375 | SDKMACHINE = "%s" |
397 | PACKAGE_CLASSES = "package_rpm package_ipk package_deb" | 376 | PACKAGE_CLASSES = "package_rpm package_ipk package_deb" |
398 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 377 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
399 | """) | 378 | """ % sdkmachine) |
400 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") | 379 | self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") |
401 | bitbake("core-image-weston -S none") | 380 | bitbake("core-image-weston -S none") |
402 | 381 | ||
@@ -416,6 +395,20 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
416 | self.maxDiff = None | 395 | self.maxDiff = None |
417 | self.assertCountEqual(files1, files2) | 396 | self.assertCountEqual(files1, files2) |
418 | 397 | ||
398 | def test_sstate_32_64_same_hash(self): | ||
399 | """ | ||
400 | The sstate checksums for both native and target should not vary whether | ||
401 | they're built on a 32 or 64 bit system. Rather than requiring two different | ||
402 | build machines and running builds, override the variables calling uname() | ||
403 | manually and check using bitbake -S. | ||
404 | """ | ||
405 | self.sstate_hashtest("i686") | ||
406 | |||
407 | def test_sstate_sdk_arch_same_hash(self): | ||
408 | """ | ||
409 | Similarly, test an arm SDK has the same hashes | ||
410 | """ | ||
411 | self.sstate_hashtest("aarch64") | ||
419 | 412 | ||
420 | def test_sstate_nativelsbstring_same_hash(self): | 413 | def test_sstate_nativelsbstring_same_hash(self): |
421 | """ | 414 | """ |
@@ -426,7 +419,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
426 | 419 | ||
427 | self.write_config(""" | 420 | self.write_config(""" |
428 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" | 421 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" |
429 | TCLIBCAPPEND = \"\" | ||
430 | NATIVELSBSTRING = \"DistroA\" | 422 | NATIVELSBSTRING = \"DistroA\" |
431 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 423 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
432 | """) | 424 | """) |
@@ -434,7 +426,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
434 | bitbake("core-image-weston -S none") | 426 | bitbake("core-image-weston -S none") |
435 | self.write_config(""" | 427 | self.write_config(""" |
436 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 428 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
437 | TCLIBCAPPEND = \"\" | ||
438 | NATIVELSBSTRING = \"DistroB\" | 429 | NATIVELSBSTRING = \"DistroB\" |
439 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 430 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
440 | """) | 431 | """) |
@@ -463,17 +454,17 @@ class SStateHashSameSigs2(SStateBase): | |||
463 | 454 | ||
464 | configA = """ | 455 | configA = """ |
465 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" | 456 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" |
466 | TCLIBCAPPEND = \"\" | ||
467 | MACHINE = \"qemux86-64\" | 457 | MACHINE = \"qemux86-64\" |
468 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 458 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
469 | """ | 459 | """ |
470 | #OLDEST_KERNEL is arch specific so set to a different value here for testing | 460 | #OLDEST_KERNEL is arch specific so set to a different value here for testing |
471 | configB = """ | 461 | configB = """ |
472 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 462 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
473 | TCLIBCAPPEND = \"\" | ||
474 | MACHINE = \"qemuarm\" | 463 | MACHINE = \"qemuarm\" |
475 | OLDEST_KERNEL = \"3.3.0\" | 464 | OLDEST_KERNEL = \"3.3.0\" |
476 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 465 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
466 | ERROR_QA:append = " somenewoption" | ||
467 | WARN_QA:append = " someotheroption" | ||
477 | """ | 468 | """ |
478 | self.sstate_common_samesigs(configA, configB, allarch=True) | 469 | self.sstate_common_samesigs(configA, configB, allarch=True) |
479 | 470 | ||
@@ -484,7 +475,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
484 | 475 | ||
485 | configA = """ | 476 | configA = """ |
486 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" | 477 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" |
487 | TCLIBCAPPEND = \"\" | ||
488 | MACHINE = \"qemux86-64\" | 478 | MACHINE = \"qemux86-64\" |
489 | require conf/multilib.conf | 479 | require conf/multilib.conf |
490 | MULTILIBS = \"multilib:lib32\" | 480 | MULTILIBS = \"multilib:lib32\" |
@@ -493,7 +483,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
493 | """ | 483 | """ |
494 | configB = """ | 484 | configB = """ |
495 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 485 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
496 | TCLIBCAPPEND = \"\" | ||
497 | MACHINE = \"qemuarm\" | 486 | MACHINE = \"qemuarm\" |
498 | require conf/multilib.conf | 487 | require conf/multilib.conf |
499 | MULTILIBS = \"\" | 488 | MULTILIBS = \"\" |
@@ -511,7 +500,6 @@ class SStateHashSameSigs3(SStateBase): | |||
511 | 500 | ||
512 | self.write_config(""" | 501 | self.write_config(""" |
513 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" | 502 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" |
514 | TCLIBCAPPEND = \"\" | ||
515 | MACHINE = \"qemux86\" | 503 | MACHINE = \"qemux86\" |
516 | require conf/multilib.conf | 504 | require conf/multilib.conf |
517 | MULTILIBS = "multilib:lib32" | 505 | MULTILIBS = "multilib:lib32" |
@@ -522,7 +510,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
522 | bitbake("world meta-toolchain -S none") | 510 | bitbake("world meta-toolchain -S none") |
523 | self.write_config(""" | 511 | self.write_config(""" |
524 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 512 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
525 | TCLIBCAPPEND = \"\" | ||
526 | MACHINE = \"qemux86copy\" | 513 | MACHINE = \"qemux86copy\" |
527 | require conf/multilib.conf | 514 | require conf/multilib.conf |
528 | MULTILIBS = "multilib:lib32" | 515 | MULTILIBS = "multilib:lib32" |
@@ -559,7 +546,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
559 | 546 | ||
560 | self.write_config(""" | 547 | self.write_config(""" |
561 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" | 548 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" |
562 | TCLIBCAPPEND = \"\" | ||
563 | MACHINE = \"qemux86\" | 549 | MACHINE = \"qemux86\" |
564 | require conf/multilib.conf | 550 | require conf/multilib.conf |
565 | MULTILIBS = "multilib:lib32" | 551 | MULTILIBS = "multilib:lib32" |
@@ -570,7 +556,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
570 | bitbake("binutils-native -S none") | 556 | bitbake("binutils-native -S none") |
571 | self.write_config(""" | 557 | self.write_config(""" |
572 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" | 558 | TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" |
573 | TCLIBCAPPEND = \"\" | ||
574 | MACHINE = \"qemux86copy\" | 559 | MACHINE = \"qemux86copy\" |
575 | BB_SIGNATURE_HANDLER = "OEBasicHash" | 560 | BB_SIGNATURE_HANDLER = "OEBasicHash" |
576 | """) | 561 | """) |
@@ -598,7 +583,6 @@ class SStateHashSameSigs4(SStateBase): | |||
598 | 583 | ||
599 | self.write_config(""" | 584 | self.write_config(""" |
600 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash" | 585 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash" |
601 | TCLIBCAPPEND = "" | ||
602 | BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}" | 586 | BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}" |
603 | PARALLEL_MAKE = "-j 1" | 587 | PARALLEL_MAKE = "-j 1" |
604 | DL_DIR = "${TOPDIR}/download1" | 588 | DL_DIR = "${TOPDIR}/download1" |
@@ -613,7 +597,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash" | |||
613 | bitbake("world meta-toolchain -S none") | 597 | bitbake("world meta-toolchain -S none") |
614 | self.write_config(""" | 598 | self.write_config(""" |
615 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" | 599 | TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" |
616 | TCLIBCAPPEND = "" | ||
617 | BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}" | 600 | BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}" |
618 | PARALLEL_MAKE = "-j 2" | 601 | PARALLEL_MAKE = "-j 2" |
619 | DL_DIR = "${TOPDIR}/download2" | 602 | DL_DIR = "${TOPDIR}/download2" |
@@ -724,7 +707,6 @@ class SStateFindSiginfo(SStateBase): | |||
724 | """ | 707 | """ |
725 | self.write_config(""" | 708 | self.write_config(""" |
726 | TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\" | 709 | TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\" |
727 | TCLIBCAPPEND = \"\" | ||
728 | MACHINE = \"qemux86-64\" | 710 | MACHINE = \"qemux86-64\" |
729 | require conf/multilib.conf | 711 | require conf/multilib.conf |
730 | MULTILIBS = "multilib:lib32" | 712 | MULTILIBS = "multilib:lib32" |
@@ -917,15 +899,24 @@ INHERIT += "base-do-configure-modified" | |||
917 | """, | 899 | """, |
918 | expected_sametmp_output, expected_difftmp_output) | 900 | expected_sametmp_output, expected_difftmp_output) |
919 | 901 | ||
920 | @OETestTag("yocto-mirrors") | 902 | class SStateCheckObjectPresence(SStateBase): |
921 | class SStateMirrors(SStateBase): | 903 | def check_bb_output(self, output, targets, exceptions, check_cdn): |
922 | def check_bb_output(self, output, exceptions, check_cdn): | ||
923 | def is_exception(object, exceptions): | 904 | def is_exception(object, exceptions): |
924 | for e in exceptions: | 905 | for e in exceptions: |
925 | if re.search(e, object): | 906 | if re.search(e, object): |
926 | return True | 907 | return True |
927 | return False | 908 | return False |
928 | 909 | ||
910 | # sstate is checked for existence of these, but they never get written out to begin with | ||
911 | exceptions += ["{}.*image_qa".format(t) for t in targets.split()] | ||
912 | exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()] | ||
913 | exceptions += ["{}.*image_complete".format(t) for t in targets.split()] | ||
914 | exceptions += ["linux-yocto.*shared_workdir"] | ||
915 | # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64) | ||
916 | # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks, | ||
917 | # which makes tracing other changes difficult | ||
918 | exceptions += ["{}.*create_.*spdx".format(t) for t in targets.split()] | ||
919 | |||
929 | output_l = output.splitlines() | 920 | output_l = output.splitlines() |
930 | for l in output_l: | 921 | for l in output_l: |
931 | if l.startswith("Sstate summary"): | 922 | if l.startswith("Sstate summary"): |
@@ -960,24 +951,15 @@ class SStateMirrors(SStateBase): | |||
960 | self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) | 951 | self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) |
961 | self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) | 952 | self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) |
962 | 953 | ||
954 | @OETestTag("yocto-mirrors") | ||
955 | class SStateMirrors(SStateCheckObjectPresence): | ||
963 | def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): | 956 | def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): |
964 | # sstate is checked for existence of these, but they never get written out to begin with | ||
965 | exceptions += ["{}.*image_qa".format(t) for t in targets.split()] | ||
966 | exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()] | ||
967 | exceptions += ["{}.*image_complete".format(t) for t in targets.split()] | ||
968 | exceptions += ["linux-yocto.*shared_workdir"] | ||
969 | # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64) | ||
970 | # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks, | ||
971 | # which makes tracing other changes difficult | ||
972 | exceptions += ["{}.*create_spdx".format(t) for t in targets.split()] | ||
973 | exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()] | ||
974 | |||
975 | if check_cdn: | 957 | if check_cdn: |
976 | self.config_sstate(True) | 958 | self.config_sstate(True) |
977 | self.append_config(""" | 959 | self.append_config(""" |
978 | MACHINE = "{}" | 960 | MACHINE = "{}" |
979 | BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687" | 961 | BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686" |
980 | SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH" | 962 | SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH" |
981 | """.format(machine)) | 963 | """.format(machine)) |
982 | else: | 964 | else: |
983 | self.append_config(""" | 965 | self.append_config(""" |
@@ -987,7 +969,7 @@ MACHINE = "{}" | |||
987 | bitbake("-S none {}".format(targets)) | 969 | bitbake("-S none {}".format(targets)) |
988 | if ignore_errors: | 970 | if ignore_errors: |
989 | return | 971 | return |
990 | self.check_bb_output(result.output, exceptions, check_cdn) | 972 | self.check_bb_output(result.output, targets, exceptions, check_cdn) |
991 | 973 | ||
992 | def test_cdn_mirror_qemux86_64(self): | 974 | def test_cdn_mirror_qemux86_64(self): |
993 | exceptions = [] | 975 | exceptions = [] |
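For reference, a sketch of how the exception patterns assembled in check_bb_output() act as a filter: each entry is a regular expression, and an object is ignored as soon as any of them matches:

import re

def is_exception(obj, exceptions):
    return any(re.search(e, obj) for e in exceptions)

targets = "core-image-minimal"
exceptions = ["{}.*image_qa".format(t) for t in targets.split()]
exceptions += ["linux-yocto.*shared_workdir"]
assert is_exception("core-image-minimal:do_image_qa", exceptions)
assert not is_exception("m4:do_populate_sysroot", exceptions)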
diff --git a/meta/lib/oeqa/selftest/cases/toolchain.py b/meta/lib/oeqa/selftest/cases/toolchain.py new file mode 100644 index 0000000000..b4b280d037 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/toolchain.py | |||
@@ -0,0 +1,71 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | import shutil | ||
8 | import subprocess | ||
9 | import tempfile | ||
10 | from types import SimpleNamespace | ||
11 | |||
12 | import oe.path | ||
13 | from oeqa.selftest.case import OESelftestTestCase | ||
14 | from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars | ||
15 | |||
16 | class ToolchainTests(OESelftestTestCase): | ||
17 | |||
18 | def test_toolchain_switching(self): | ||
19 | """ | ||
20 | Test that a configuration that uses GCC by default but clang for one | ||
21 | specific recipe does in fact do that. | ||
22 | """ | ||
23 | |||
24 | def extract_comment(objcopy, filename): | ||
25 | """ | ||
26 | Using the specified `objcopy`, return the .comment section from | ||
27 | `filename` as a bytes(). | ||
28 | """ | ||
29 | with tempfile.NamedTemporaryFile(prefix="comment-") as f: | ||
30 | cmd = [objcopy, "--dump-section", ".comment=" + f.name, filename] | ||
31 | subprocess.run(cmd, check=True) | ||
32 | # clang's objcopy writes to a temporary file and renames, so we need to re-open. | ||
33 | with open(f.name, "rb") as f2: | ||
34 | return f2.read() | ||
35 | |||
36 | def check_recipe(recipe, filename, override, comment_present, comment_absent=None): | ||
37 | """ | ||
38 | Check that `filename` in `recipe`'s bindir contains `comment_present`, and | ||
39 | the overrides contain `override`. | ||
40 | """ | ||
41 | d = SimpleNamespace(**get_bb_vars(("D", "bindir", "OBJCOPY", "OVERRIDES", "PATH"), target=recipe)) | ||
42 | |||
43 | self.assertIn(override, d.OVERRIDES) | ||
44 | |||
45 | binary = oe.path.join(d.D, d.bindir, filename) | ||
46 | |||
47 | objcopy = shutil.which(d.OBJCOPY, path=d.PATH) | ||
48 | self.assertIsNotNone(objcopy) | ||
49 | |||
50 | comment = extract_comment(objcopy, binary) | ||
51 | self.assertIn(comment_present, comment) | ||
52 | if comment_absent: | ||
53 | self.assertNotIn(comment_absent, comment) | ||
54 | |||
55 | |||
56 | # GCC by default, clang for selftest-hello. | ||
57 | self.write_config(""" | ||
58 | TOOLCHAIN = "gcc" | ||
59 | TOOLCHAIN:pn-selftest-hello = "clang" | ||
60 | """) | ||
61 | |||
62 | # Force these recipes to re-install so we can extract the .comments from | ||
63 | # the install directory, as they're stripped out of the final packages. | ||
64 | bitbake("m4 selftest-hello -C install") | ||
65 | |||
66 | # m4 should be built with GCC and only GCC | ||
67 | check_recipe("m4", "m4", "toolchain-gcc", b"GCC: (GNU)", b"clang") | ||
68 | |||
69 | # helloworld should be built with clang. We can't assert that GCC is not | ||
70 | # present as it will be linked against glibc which is built with GCC. | ||
71 | check_recipe("selftest-hello", "helloworld", "toolchain-clang", b"clang version") | ||
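The .comment section is the crux of test_toolchain_switching: GCC stamps the objects it compiles with "GCC: (GNU) ..." while clang stamps "clang version ...". The same section can also be read without objcopy, e.g. with pyelftools; this is an assumed alternative for illustration, not something the test uses:

from elftools.elf.elffile import ELFFile   # assumed dependency (pyelftools)

with open("./helloworld", "rb") as f:      # path illustrative
    section = ELFFile(f).get_section_by_name(".comment")
    assert section is not None, "binary has no .comment section"
    print(section.data())                  # e.g. b'clang version ...'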
diff --git a/meta/lib/oeqa/selftest/cases/uboot.py b/meta/lib/oeqa/selftest/cases/uboot.py new file mode 100644 index 0000000000..980ea327f0 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/uboot.py | |||
@@ -0,0 +1,98 @@ | |||
1 | # Qemu-based u-boot bootloader integration testing | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | # | ||
7 | |||
8 | from oeqa.selftest.case import OESelftestTestCase | ||
9 | from oeqa.utils.commands import bitbake, runqemu, get_bb_var, get_bb_vars, runCmd | ||
10 | from oeqa.core.decorator.data import skipIfNotArch, skipIfNotBuildArch | ||
11 | from oeqa.core.decorator import OETestTag | ||
12 | |||
13 | uboot_boot_patterns = { | ||
14 | 'search_reached_prompt': "stop autoboot", | ||
15 | 'search_login_succeeded': "=>", | ||
16 | 'search_cmd_finished': "=>" | ||
17 | } | ||
18 | |||
19 | |||
20 | class UBootTest(OESelftestTestCase): | ||
21 | |||
22 | @skipIfNotArch(['arm', 'aarch64']) | ||
23 | @OETestTag("runqemu") | ||
24 | def test_boot_uboot(self): | ||
25 | """ | ||
26 | Tests building u-boot and booting it with QEMU | ||
27 | """ | ||
28 | |||
29 | self.write_config(""" | ||
30 | QB_DEFAULT_BIOS = "u-boot.bin" | ||
31 | PREFERRED_PROVIDER_virtual/bootloader = "u-boot" | ||
32 | QEMU_USE_KVM = "False" | ||
33 | """) | ||
34 | bitbake("virtual/bootloader core-image-minimal") | ||
35 | |||
36 | with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic', | ||
37 | boot_patterns=uboot_boot_patterns) as qemu: | ||
38 | |||
39 | # test if u-boot console works | ||
40 | cmd = "version" | ||
41 | status, output = qemu.run_serial(cmd) | ||
42 | self.assertEqual(status, 1, msg=output) | ||
43 | self.assertTrue("U-Boot" in output, msg=output) | ||
44 | |||
45 | @skipIfNotArch(['aarch64']) | ||
46 | @skipIfNotBuildArch(['aarch64']) | ||
47 | @OETestTag("runqemu") | ||
48 | def test_boot_uboot_kvm_to_full_target(self): | ||
49 | """ | ||
50 | Tests building u-boot and booting it with QEMU and KVM. | ||
51 | Requires working KVM on build host. See "kvm-ok" output. | ||
52 | """ | ||
53 | |||
54 | runCmd("kvm-ok") | ||
55 | |||
56 | image = "core-image-minimal" | ||
57 | vars = get_bb_vars(['HOST_ARCH', 'BUILD_ARCH'], image) | ||
58 | host_arch = vars['HOST_ARCH'] | ||
59 | build_arch = vars['BUILD_ARCH'] | ||
60 | |||
61 | self.assertEqual(host_arch, build_arch, 'HOST_ARCH %s and BUILD_ARCH %s must match for KVM' % (host_arch, build_arch)) | ||
62 | |||
63 | self.write_config(""" | ||
64 | QEMU_USE_KVM = "1" | ||
65 | |||
66 | # Using u-boot in EFI mode, need ESP partition for grub/systemd-boot/kernel etc | ||
67 | IMAGE_FSTYPES:pn-core-image-minimal:append = " wic" | ||
68 | |||
69 | # easiest to follow genericarm64 setup with wks file, initrd and EFI loader | ||
70 | INITRAMFS_IMAGE = "core-image-initramfs-boot" | ||
71 | EFI_PROVIDER = "${@bb.utils.contains("DISTRO_FEATURES", "systemd", "systemd-boot", "grub-efi", d)}" | ||
72 | WKS_FILE = "genericarm64.wks.in" | ||
73 | |||
74 | # use wic image with ESP for u-boot, not ext4 | ||
75 | QB_DEFAULT_FSTYPE = "wic" | ||
76 | |||
77 | PREFERRED_PROVIDER_virtual/bootloader = "u-boot" | ||
78 | QB_DEFAULT_BIOS = "u-boot.bin" | ||
79 | |||
80 | # let u-boot or EFI loader load kernel from ESP | ||
81 | QB_DEFAULT_KERNEL = "none" | ||
82 | |||
83 | # virtio-pci drive, not scsi, because u-boot lacks the support needed to find the ESP | ||
84 | QB_DRIVE_TYPE = "/dev/vd" | ||
85 | """) | ||
86 | bitbake("virtual/bootloader %s" % image) | ||
87 | |||
88 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or "" | ||
89 | with runqemu(image, ssh=False, runqemuparams='nographic kvm %s' % runqemu_params) as qemu: | ||
90 | |||
91 | # boot to target and login worked; with KVM this should have been fast | ||
92 | cmd = "dmesg" | ||
93 | status, output = qemu.run_serial(cmd) | ||
94 | self.assertEqual(status, 1, msg=output) | ||
95 | # Machine is qemu | ||
96 | self.assertTrue("Machine model: linux,dummy-virt" in output, msg=output) | ||
97 | # with KVM enabled | ||
98 | self.assertTrue("KVM: hypervisor services detected" in output, msg=output) | ||
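A reviewer note on the assertions above: run_serial() returns a status of 1 on success and 0 on failure, not a shell exit code, so asserting status == 1 is the success path. A hypothetical helper (not part of this patch) that makes the convention explicit:

    def assert_serial(test, qemu, cmd, expect=None):
        # run_serial() returns 1 on success, 0 on failure (not a shell exit code)
        status, output = qemu.run_serial(cmd)
        test.assertEqual(status, 1, 'Failed to run command "%s": %s' % (cmd, output))
        if expect is not None:
            test.assertIn(expect, output)
        return output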
diff --git a/meta/lib/oeqa/selftest/cases/uki.py b/meta/lib/oeqa/selftest/cases/uki.py new file mode 100644 index 0000000000..9a1aa4e269 --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/uki.py | |||
@@ -0,0 +1,141 @@ | |||
1 | # Based on runqemu.py test file | ||
2 | # | ||
3 | # Copyright (c) 2017 Wind River Systems, Inc. | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | # | ||
7 | |||
8 | from oeqa.selftest.case import OESelftestTestCase | ||
9 | from oeqa.utils.commands import bitbake, runqemu, get_bb_var | ||
10 | from oeqa.core.decorator.data import skipIfNotArch | ||
11 | from oeqa.core.decorator import OETestTag | ||
12 | import oe.types | ||
13 | |||
14 | class UkiTest(OESelftestTestCase): | ||
15 | """Boot Unified Kernel Image (UKI) generated with uki.bbclass on UEFI firmware (omvf/edk2)""" | ||
16 | |||
17 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
18 | @OETestTag("runqemu") | ||
19 | def test_uki_boot_systemd(self): | ||
20 | """Build and boot into UEFI firmware (omvf/edk2), systemd-boot, initrd without systemd, rootfs with systemd""" | ||
21 | image = "core-image-minimal" | ||
22 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or "" | ||
23 | cmd = "runqemu %s nographic serial wic ovmf" % (runqemu_params) | ||
24 | if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]): | ||
25 | cmd += " kvm" | ||
26 | |||
27 | self.write_config(""" | ||
28 | # efi firmware must load systemd-boot, not grub | ||
29 | EFI_PROVIDER = "systemd-boot" | ||
30 | |||
31 | # image format must be wic, needs esp partition for firmware etc | ||
32 | IMAGE_FSTYPES:pn-%s:append = " wic" | ||
33 | WKS_FILE = "efi-uki-bootdisk.wks.in" | ||
34 | |||
35 | # efi, uki and systemd features must be enabled | ||
36 | INIT_MANAGER = "systemd" | ||
37 | MACHINE_FEATURES:append = " efi" | ||
38 | IMAGE_CLASSES:append:pn-core-image-minimal = " uki" | ||
39 | |||
40 | # uki also embeds an initrd | ||
41 | INITRAMFS_IMAGE = "core-image-minimal-initramfs" | ||
42 | |||
43 | # runqemu must not load kernel separately, it's in the uki | ||
44 | QB_KERNEL_ROOT = "" | ||
45 | QB_DEFAULT_KERNEL = "none" | ||
46 | |||
47 | # boot command line provided via uki, not via bootloader | ||
48 | UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}" | ||
49 | |||
50 | # disable kvm, breaks boot | ||
51 | QEMU_USE_KVM = "" | ||
52 | |||
53 | IMAGE_CLASSES:remove = 'testimage' | ||
54 | """ % (image)) | ||
55 | |||
56 | uki_filename = get_bb_var('UKI_FILENAME', image) | ||
57 | |||
58 | bitbake(image + " ovmf") | ||
59 | with runqemu(image, ssh=False, launch_cmd=cmd) as qemu: | ||
60 | self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) | ||
61 | |||
62 | # Verify from efivars that firmware was: | ||
63 | # x86_64, qemux86_64, ovmf = edk2 | ||
64 | cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderFirmwareInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep 'EDK II'" | ||
65 | status, output = qemu.run_serial(cmd) | ||
66 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
67 | |||
68 | # Check that systemd-boot was the loader | ||
69 | cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-boot" | ||
70 | status, output = qemu.run_serial(cmd) | ||
71 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
72 | |||
73 | # Check that systemd-stub was used | ||
74 | cmd = "echo $( cat /sys/firmware/efi/efivars/StubInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-stub" | ||
75 | status, output = qemu.run_serial(cmd) | ||
76 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
77 | |||
78 | # Check that the compiled uki file was booted into | ||
79 | cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep '%s'" % (uki_filename) | ||
80 | status, output = qemu.run_serial(cmd) | ||
81 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
82 | |||
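The GUID in these efivars paths, 4a67b082-0a4c-41cf-b6c7-440b29bb8c4f, is the systemd boot-loader interface vendor GUID. Each efivarfs file starts with a 4-byte attributes field followed by a UTF-16LE string payload; the `echo $( cat ... )` wrapper works because the shell drops the payload's NUL bytes, letting plain grep match. A sketch of reading such a variable directly (read_loader_efivar is a hypothetical helper, not part of this patch):

    def read_loader_efivar(name, guid="4a67b082-0a4c-41cf-b6c7-440b29bb8c4f"):
        # efivarfs layout: 4 bytes of attributes, then the variable payload;
        # systemd's loader variables are UTF-16LE strings
        path = "/sys/firmware/efi/efivars/%s-%s" % (name, guid)
        with open(path, "rb") as f:
            data = f.read()
        return data[4:].decode("utf-16-le").rstrip("\x00")

    # e.g. read_loader_efivar("LoaderInfo") should name the loader, "systemd-boot ..."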
83 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
84 | @OETestTag("runqemu") | ||
85 | def test_uki_sysvinit(self): | ||
86 | """Build and boot into UEFI firmware (omvf/edk2), systemd-boot, initrd with sysvinit, rootfs with sysvinit""" | ||
87 | config = """ | ||
88 | # efi firmware must load systemd-boot, not grub | ||
89 | EFI_PROVIDER = "systemd-boot" | ||
90 | |||
91 | # image format must be wic, needs esp partition for firmware etc | ||
92 | IMAGE_FSTYPES:pn-core-image-base:append = " wic" | ||
93 | WKS_FILE = "efi-uki-bootdisk.wks.in" | ||
94 | |||
95 | # efi, uki and systemd features must be enabled | ||
96 | MACHINE_FEATURES:append = " efi" | ||
97 | IMAGE_CLASSES:append:pn-core-image-base = " uki" | ||
98 | |||
99 | # uki also embeds an initrd, no systemd or udev | ||
100 | INITRAMFS_IMAGE = "core-image-initramfs-boot" | ||
101 | |||
102 | # runqemu must not load kernel separately, it's in the uki | ||
103 | QB_KERNEL_ROOT = "" | ||
104 | QB_DEFAULT_KERNEL = "none" | ||
105 | |||
106 | # boot command line provided via uki, not via bootloader | ||
107 | UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}" | ||
108 | |||
109 | # disable kvm, breaks boot | ||
110 | QEMU_USE_KVM = "" | ||
111 | |||
112 | IMAGE_CLASSES:remove = 'testimage' | ||
113 | """ | ||
114 | self.append_config(config) | ||
115 | bitbake('core-image-base ovmf') | ||
116 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or "" | ||
117 | uki_filename = get_bb_var('UKI_FILENAME', 'core-image-base') | ||
118 | self.remove_config(config) | ||
119 | |||
120 | with runqemu('core-image-base', ssh=False, | ||
121 | runqemuparams='%s slirp nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu: | ||
122 | # Verify from efivars that firmware was: | ||
123 | # x86_64, qemux86_64, ovmf = edk2 | ||
124 | cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderFirmwareInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep 'EDK II'" | ||
125 | status, output = qemu.run_serial(cmd) | ||
126 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
127 | |||
128 | # Check that systemd-boot was the loader | ||
129 | cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-boot" | ||
130 | status, output = qemu.run_serial(cmd) | ||
131 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
132 | |||
133 | # Check that systemd-stub was used | ||
134 | cmd = "echo $( cat /sys/firmware/efi/efivars/StubInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-stub" | ||
135 | status, output = qemu.run_serial(cmd) | ||
136 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
137 | |||
138 | # Check that the compiled uki file was booted into | ||
139 | cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep '%s'" % (uki_filename) | ||
140 | status, output = qemu.run_serial(cmd) | ||
141 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py index b616759209..680f99d381 100644 --- a/meta/lib/oeqa/selftest/cases/wic.py +++ b/meta/lib/oeqa/selftest/cases/wic.py | |||
@@ -12,6 +12,7 @@ import os | |||
12 | import sys | 12 | import sys |
13 | import unittest | 13 | import unittest |
14 | import hashlib | 14 | import hashlib |
15 | import subprocess | ||
15 | 16 | ||
16 | from glob import glob | 17 | from glob import glob |
17 | from shutil import rmtree, copy | 18 | from shutil import rmtree, copy |
@@ -152,7 +153,7 @@ class Wic(WicTestCase): | |||
152 | # create a temporary file for the WKS content | 153 | # create a temporary file for the WKS content |
153 | with NamedTemporaryFile("w", suffix=".wks") as wks: | 154 | with NamedTemporaryFile("w", suffix=".wks") as wks: |
154 | wks.write( | 155 | wks.write( |
155 | 'part --source bootimg-efi ' | 156 | 'part --source bootimg_efi ' |
156 | '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" ' | 157 | '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" ' |
157 | '--label boot --active\n' | 158 | '--label boot --active\n' |
158 | ) | 159 | ) |
@@ -185,7 +186,7 @@ class Wic(WicTestCase): | |||
185 | # create a temporary file for the WKS content | 186 | # create a temporary file for the WKS content |
186 | with NamedTemporaryFile("w", suffix=".wks") as wks: | 187 | with NamedTemporaryFile("w", suffix=".wks") as wks: |
187 | wks.write( | 188 | wks.write( |
188 | 'part --source bootimg-efi ' | 189 | 'part --source bootimg_efi ' |
189 | '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" ' | 190 | '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" ' |
190 | '--label boot --active\n' | 191 | '--label boot --active\n' |
191 | ) | 192 | ) |
@@ -445,8 +446,9 @@ class Wic(WicTestCase): | |||
445 | wks.write(""" | 446 | wks.write(""" |
446 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr | 447 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr |
447 | part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr | 448 | part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr |
448 | part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr""" | 449 | part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr |
449 | % (rootfs_dir, rootfs_dir)) | 450 | part /mnt --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/whoami --rootfs-dir %s/usr""" |
451 | % (rootfs_dir, rootfs_dir, rootfs_dir)) | ||
450 | runCmd("wic create %s -e core-image-minimal -o %s" \ | 452 | runCmd("wic create %s -e core-image-minimal -o %s" \ |
451 | % (wks_file, self.resultdir)) | 453 | % (wks_file, self.resultdir)) |
452 | 454 | ||
@@ -457,7 +459,7 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r | |||
457 | wicimg = wicout[0] | 459 | wicimg = wicout[0] |
458 | 460 | ||
459 | # verify partition size with wic | 461 | # verify partition size with wic |
460 | res = runCmd("parted -m %s unit b p 2>/dev/null" % wicimg) | 462 | res = runCmd("parted -m %s unit b p" % wicimg, stderr=subprocess.PIPE) |
461 | 463 | ||
462 | # parse parted output which looks like this: | 464 | # parse parted output which looks like this: |
463 | # BYT;\n | 465 | # BYT;\n |
@@ -465,9 +467,9 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r | |||
465 | # 1:0.00MiB:200MiB:200MiB:ext4::;\n | 467 | # 1:0.00MiB:200MiB:200MiB:ext4::;\n |
466 | partlns = res.output.splitlines()[2:] | 468 | partlns = res.output.splitlines()[2:] |
467 | 469 | ||
468 | self.assertEqual(3, len(partlns)) | 470 | self.assertEqual(4, len(partlns)) |
469 | 471 | ||
470 | for part in [1, 2, 3]: | 472 | for part in [1, 2, 3, 4]: |
471 | part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) | 473 | part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) |
472 | partln = partlns[part-1].split(":") | 474 | partln = partlns[part-1].split(":") |
473 | self.assertEqual(7, len(partln)) | 475 | self.assertEqual(7, len(partln)) |
@@ -478,16 +480,16 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r | |||
478 | 480 | ||
479 | # Test partition 1, should contain the normal root directories, except | 481 | # Test partition 1, should contain the normal root directories, except |
480 | # /usr. | 482 | # /usr. |
481 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ | 483 | res = runCmd("debugfs -R 'ls -p' %s" % \ |
482 | os.path.join(self.resultdir, "selftest_img.part1")) | 484 | os.path.join(self.resultdir, "selftest_img.part1"), stderr=subprocess.PIPE) |
483 | files = extract_files(res.output) | 485 | files = extract_files(res.output) |
484 | self.assertIn("etc", files) | 486 | self.assertIn("etc", files) |
485 | self.assertNotIn("usr", files) | 487 | self.assertNotIn("usr", files) |
486 | 488 | ||
487 | # Partition 2, should contain common directories for /usr, not root | 489 | # Partition 2, should contain common directories for /usr, not root |
488 | # directories. | 490 | # directories. |
489 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ | 491 | res = runCmd("debugfs -R 'ls -p' %s" % \ |
490 | os.path.join(self.resultdir, "selftest_img.part2")) | 492 | os.path.join(self.resultdir, "selftest_img.part2"), stderr=subprocess.PIPE) |
491 | files = extract_files(res.output) | 493 | files = extract_files(res.output) |
492 | self.assertNotIn("etc", files) | 494 | self.assertNotIn("etc", files) |
493 | self.assertNotIn("usr", files) | 495 | self.assertNotIn("usr", files) |
@@ -495,27 +497,78 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r | |||
495 | 497 | ||
496 | # Partition 3, should contain the same as partition 2, including the bin | 498 | # Partition 3, should contain the same as partition 2, including the bin |
497 | # directory, but not the files inside it. | 499 | # directory, but not the files inside it. |
498 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ | 500 | res = runCmd("debugfs -R 'ls -p' %s" % \ |
499 | os.path.join(self.resultdir, "selftest_img.part3")) | 501 | os.path.join(self.resultdir, "selftest_img.part3"), stderr=subprocess.PIPE) |
500 | files = extract_files(res.output) | 502 | files = extract_files(res.output) |
501 | self.assertNotIn("etc", files) | 503 | self.assertNotIn("etc", files) |
502 | self.assertNotIn("usr", files) | 504 | self.assertNotIn("usr", files) |
503 | self.assertIn("share", files) | 505 | self.assertIn("share", files) |
504 | self.assertIn("bin", files) | 506 | self.assertIn("bin", files) |
505 | res = runCmd("debugfs -R 'ls -p bin' %s 2>/dev/null" % \ | 507 | res = runCmd("debugfs -R 'ls -p bin' %s" % \ |
506 | os.path.join(self.resultdir, "selftest_img.part3")) | 508 | os.path.join(self.resultdir, "selftest_img.part3"), stderr=subprocess.PIPE) |
507 | files = extract_files(res.output) | 509 | files = extract_files(res.output) |
508 | self.assertIn(".", files) | 510 | self.assertIn(".", files) |
509 | self.assertIn("..", files) | 511 | self.assertIn("..", files) |
510 | self.assertEqual(2, len(files)) | 512 | self.assertEqual(2, len(files)) |
511 | 513 | ||
512 | for part in [1, 2, 3]: | 514 | # Partition 4, should contain the same as partition 2, including the bin |
515 | # directory, but not whoami (a symlink to busybox.nosuid) inside it. | ||
516 | res = runCmd("debugfs -R 'ls -p' %s" % \ | ||
517 | os.path.join(self.resultdir, "selftest_img.part4"), stderr=subprocess.PIPE) | ||
518 | files = extract_files(res.output) | ||
519 | self.assertNotIn("etc", files) | ||
520 | self.assertNotIn("usr", files) | ||
521 | self.assertIn("share", files) | ||
522 | self.assertIn("bin", files) | ||
523 | res = runCmd("debugfs -R 'ls -p bin' %s" % \ | ||
524 | os.path.join(self.resultdir, "selftest_img.part4"), stderr=subprocess.PIPE) | ||
525 | files = extract_files(res.output) | ||
526 | self.assertIn(".", files) | ||
527 | self.assertIn("..", files) | ||
528 | self.assertIn("who", files) | ||
529 | self.assertNotIn("whoami", files) | ||
530 | |||
531 | for part in [1, 2, 3, 4]: | ||
513 | part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) | 532 | part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) |
514 | os.remove(part_file) | 533 | os.remove(part_file) |
515 | 534 | ||
516 | finally: | 535 | finally: |
517 | os.environ['PATH'] = oldpath | 536 | os.environ['PATH'] = oldpath |
518 | 537 | ||
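The partition checks above lean on extract_files(), defined earlier in wic.py, to pull names out of debugfs output. Assuming debugfs's documented "ls -p" format, where each entry is printed as /<inode>/<mode>/<uid>/<gid>/<name>/<size>/, the parsing reduces to a sketch like:

    def extract_files_sketch(debugfs_output):
        # the file name is the fifth slash-separated field of each entry
        return [line.split('/')[5]
                for line in debugfs_output.splitlines()
                if line.startswith('/')]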
538 | def test_exclude_path_with_extra_space(self): | ||
539 | """Test having --exclude-path with IMAGE_ROOTFS_EXTRA_SPACE. [Yocto #15555]""" | ||
540 | |||
541 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
542 | wks.writelines( | ||
543 | ['bootloader --ptable gpt\n', | ||
544 | 'part /boot --size=100M --active --fstype=ext4 --label boot\n', | ||
545 | 'part / --source rootfs --fstype=ext4 --label root --exclude-path boot/\n']) | ||
546 | wks.flush() | ||
547 | config = 'IMAGE_ROOTFS_EXTRA_SPACE = "500000"\n'\ | ||
548 | 'DEPENDS:pn-core-image-minimal += "wic-tools"\n'\ | ||
549 | 'IMAGE_FSTYPES += "wic ext4"\n'\ | ||
550 | 'WKS_FILE = "%s"\n' % wks.name | ||
551 | self.append_config(config) | ||
552 | bitbake('core-image-minimal') | ||
553 | |||
554 | """ | ||
555 | The output of "wic ls <image>.wic" will look something like: | ||
556 | Num Start End Size Fstype | ||
557 | 1 17408 136332287 136314880 ext4 | ||
558 | 2 136332288 171464703 35132416 ext4 | ||
559 | We are looking for the size of partition 2, | ||
560 | i.e. in this case the number 35,132,416. | ||
561 | Without the fix the size will be around 85,403,648; | ||
562 | with the fix it should be around 799,960,064. | ||
563 | """ | ||
564 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'], 'core-image-minimal') | ||
565 | deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] | ||
566 | machine = bb_vars['MACHINE'] | ||
567 | nativesysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
568 | wicout = glob(os.path.join(deploy_dir, "core-image-minimal-%s.rootfs-*.wic" % machine))[0] | ||
569 | size_of_root_partition = int(runCmd("wic ls %s --native-sysroot %s" % (wicout, nativesysroot)).output.split('\n')[2].split()[3]) | ||
570 | self.assertGreater(size_of_root_partition, 500000000) | ||
571 | |||
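The size extraction above indexes straight into the "wic ls" table: line 2 is the second partition's row and field 3 is its Size column. A worked example against the sample output quoted in the test's comment:

    output = ("Num     Start        End          Size      Fstype\n"
              "1        17408    136332287    136314880  ext4\n"
              "2    136332288    171464703     35132416  ext4")

    # line index 2 = partition 2, whitespace-split field index 3 = Size in bytes
    size_of_root_partition = int(output.split('\n')[2].split()[3])
    assert size_of_root_partition == 35132416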
519 | def test_include_path(self): | 572 | def test_include_path(self): |
520 | """Test --include-path wks option.""" | 573 | """Test --include-path wks option.""" |
521 | 574 | ||
@@ -541,13 +594,13 @@ part /part2 --source rootfs --ondisk mmcblk0 --fstype=ext4 --include-path %s""" | |||
541 | part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0] | 594 | part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0] |
542 | 595 | ||
543 | # Test partition 1, should not contain 'test-file' | 596 | # Test partition 1, should not contain 'test-file' |
544 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) | 597 | res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE) |
545 | files = extract_files(res.output) | 598 | files = extract_files(res.output) |
546 | self.assertNotIn('test-file', files) | 599 | self.assertNotIn('test-file', files) |
547 | self.assertEqual(True, files_own_by_root(res.output)) | 600 | self.assertEqual(True, files_own_by_root(res.output)) |
548 | 601 | ||
549 | # Test partition 2, should contain 'test-file' | 602 | # Test partition 2, should contain 'test-file' |
550 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part2)) | 603 | res = runCmd("debugfs -R 'ls -p' %s" % (part2), stderr=subprocess.PIPE) |
551 | files = extract_files(res.output) | 604 | files = extract_files(res.output) |
552 | self.assertIn('test-file', files) | 605 | self.assertIn('test-file', files) |
553 | self.assertEqual(True, files_own_by_root(res.output)) | 606 | self.assertEqual(True, files_own_by_root(res.output)) |
@@ -576,12 +629,12 @@ part / --source rootfs --fstype=ext4 --include-path %s --include-path core-imag | |||
576 | 629 | ||
577 | part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] | 630 | part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] |
578 | 631 | ||
579 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) | 632 | res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE) |
580 | files = extract_files(res.output) | 633 | files = extract_files(res.output) |
581 | self.assertIn('test-file', files) | 634 | self.assertIn('test-file', files) |
582 | self.assertEqual(True, files_own_by_root(res.output)) | 635 | self.assertEqual(True, files_own_by_root(res.output)) |
583 | 636 | ||
584 | res = runCmd("debugfs -R 'ls -p /export/etc/' %s 2>/dev/null" % (part1)) | 637 | res = runCmd("debugfs -R 'ls -p /export/etc/' %s" % (part1), stderr=subprocess.PIPE) |
585 | files = extract_files(res.output) | 638 | files = extract_files(res.output) |
586 | self.assertIn('passwd', files) | 639 | self.assertIn('passwd', files) |
587 | self.assertEqual(True, files_own_by_root(res.output)) | 640 | self.assertEqual(True, files_own_by_root(res.output)) |
@@ -668,7 +721,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc | |||
668 | % (wks_file, self.resultdir)) | 721 | % (wks_file, self.resultdir)) |
669 | 722 | ||
670 | for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')): | 723 | for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')): |
671 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part)) | 724 | res = runCmd("debugfs -R 'ls -p' %s" % (part), stderr=subprocess.PIPE) |
672 | self.assertEqual(True, files_own_by_root(res.output)) | 725 | self.assertEqual(True, files_own_by_root(res.output)) |
673 | 726 | ||
674 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file | 727 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file |
@@ -678,7 +731,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc | |||
678 | 731 | ||
679 | # check each partition for permission | 732 | # check each partition for permission |
680 | for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')): | 733 | for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')): |
681 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part)) | 734 | res = runCmd("debugfs -R 'ls -p' %s" % (part), stderr=subprocess.PIPE) |
682 | self.assertTrue(files_own_by_root(res.output) | 735 | self.assertTrue(files_own_by_root(res.output) |
683 | ,msg='Files permission incorrect using wks set "%s"' % test) | 736 | ,msg='Files permission incorrect using wks set "%s"' % test) |
684 | 737 | ||
@@ -706,7 +759,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc | |||
706 | 759 | ||
707 | part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] | 760 | part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] |
708 | 761 | ||
709 | res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) | 762 | res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE) |
710 | files = extract_files(res.output) | 763 | files = extract_files(res.output) |
711 | self.assertIn('passwd', files) | 764 | self.assertIn('passwd', files) |
712 | 765 | ||
@@ -741,7 +794,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc | |||
741 | bitbake('base-files -c do_install') | 794 | bitbake('base-files -c do_install') |
742 | bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab') | 795 | bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab') |
743 | self.assertEqual(True, os.path.exists(bf_fstab)) | 796 | self.assertEqual(True, os.path.exists(bf_fstab)) |
744 | bf_fstab_md5sum = runCmd('md5sum %s 2>/dev/null' % bf_fstab).output.split(" ")[0] | 797 | bf_fstab_md5sum = runCmd('md5sum %s ' % bf_fstab).output.split(" ")[0] |
745 | 798 | ||
746 | try: | 799 | try: |
747 | no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update') | 800 | no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update') |
@@ -757,7 +810,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc | |||
757 | part_fstab_md5sum = [] | 810 | part_fstab_md5sum = [] |
758 | for i in range(1, 3): | 811 | for i in range(1, 3): |
759 | part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0] | 812 | part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0] |
760 | part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s 2>/dev/null" % (part)) | 813 | part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s" % (part), stderr=subprocess.PIPE) |
761 | part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest()) | 814 | part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest()) |
762 | 815 | ||
763 | # '/etc/fstab' in partition 2 should contain the same stock fstab file | 816 | # '/etc/fstab' in partition 2 should contain the same stock fstab file |
@@ -839,6 +892,61 @@ bootloader --ptable gpt""") | |||
839 | finally: | 892 | finally: |
840 | os.remove(wks_file) | 893 | os.remove(wks_file) |
841 | 894 | ||
895 | def test_wic_sector_size(self): | ||
896 | """Test generation image sector size""" | ||
897 | |||
898 | oldpath = os.environ['PATH'] | ||
899 | os.environ['PATH'] = get_bb_var("PATH", "wic-tools") | ||
900 | |||
901 | try: | ||
902 | # Add WIC_SECTOR_SIZE into config | ||
903 | config = 'WIC_SECTOR_SIZE = "4096"\n'\ | ||
904 | 'WICVARS:append = " WIC_SECTOR_SIZE"\n' | ||
905 | self.append_config(config) | ||
906 | bitbake('core-image-minimal') | ||
907 | |||
908 | # Check that WIC_SECTOR_SIZE was applied to the bitbake variable | ||
909 | wic_sector_size_str = get_bb_var('WIC_SECTOR_SIZE', 'core-image-minimal') | ||
910 | wic_sector_size = int(wic_sector_size_str) | ||
911 | self.assertEqual(4096, wic_sector_size) | ||
912 | |||
913 | self.logger.info("Test wic_sector_size: %d \n" % wic_sector_size) | ||
914 | |||
915 | with NamedTemporaryFile("w", suffix=".wks") as wks: | ||
916 | wks.writelines( | ||
917 | ['bootloader --ptable gpt\n', | ||
918 | 'part --fstype ext4 --source rootfs --label rofs-a --mkfs-extraopts "-b 4096"\n', | ||
919 | 'part --fstype ext4 --source rootfs --use-uuid --mkfs-extraopts "-b 4096"\n']) | ||
920 | wks.flush() | ||
921 | cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) | ||
922 | runCmd(cmd) | ||
923 | wksname = os.path.splitext(os.path.basename(wks.name))[0] | ||
924 | images = glob(os.path.join(self.resultdir, "%s-*direct" % wksname)) | ||
925 | self.assertEqual(1, len(images)) | ||
926 | |||
927 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
928 | # list partitions | ||
929 | result = runCmd("wic ls %s -n %s" % (images[0], sysroot)) | ||
930 | self.assertEqual(3, len(result.output.split('\n'))) | ||
931 | |||
932 | # verify partition size with wic | ||
933 | res = runCmd("export PARTED_SECTOR_SIZE=%d; parted -m %s unit b p" % (wic_sector_size, images[0]), | ||
934 | stderr=subprocess.PIPE) | ||
935 | |||
936 | # parse parted output which looks like this: | ||
937 | # BYT;\n | ||
938 | # /var/tmp/wic/build/tmpgjzzefdd-202410281021-sda.direct:78569472B:file:4096:4096:gpt::;\n | ||
939 | # 1:139264B:39284735B:39145472B:ext4:rofs-a:;\n | ||
940 | # 2:39284736B:78430207B:39145472B:ext4:primary:;\n | ||
941 | disk_info = res.output.splitlines()[1] | ||
942 | # Check sector sizes | ||
943 | sector_size_logical = int(disk_info.split(":")[3]) | ||
944 | sector_size_physical = int(disk_info.split(":")[4]) | ||
945 | self.assertEqual(wic_sector_size, sector_size_logical, "Logical sector size is not %d." % wic_sector_size) | ||
946 | self.assertEqual(wic_sector_size, sector_size_physical, "Physical sector size is not %d." % wic_sector_size) | ||
947 | |||
948 | finally: | ||
949 | os.environ['PATH'] = oldpath | ||
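The sector-size assertions parse parted's machine-readable output, whose second line describes the whole disk as path:size:transport:logical-sector:physical-sector:table:model:flags. Checking the sample line from the comment above:

    res_output = ("BYT;\n"
                  "/var/tmp/wic/build/tmpgjzzefdd-202410281021-sda.direct:78569472B:file:4096:4096:gpt::;\n"
                  "1:139264B:39284735B:39145472B:ext4:rofs-a:;")

    disk_info = res_output.splitlines()[1]
    assert int(disk_info.split(":")[3]) == 4096  # logical sector size
    assert int(disk_info.split(":")[4]) == 4096  # physical sector size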
842 | 950 | ||
843 | class Wic2(WicTestCase): | 951 | class Wic2(WicTestCase): |
844 | 952 | ||
@@ -913,6 +1021,18 @@ class Wic2(WicTestCase): | |||
913 | """Test building wic images by bitbake""" | 1021 | """Test building wic images by bitbake""" |
914 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ | 1022 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ |
915 | 'MACHINE_FEATURES:append = " efi"\n' | 1023 | 'MACHINE_FEATURES:append = " efi"\n' |
1024 | image_recipe_append = """ | ||
1025 | do_image_wic[postfuncs] += "run_wic_cmd" | ||
1026 | run_wic_cmd() { | ||
1027 | echo "test" >> ${WORKDIR}/test.wic-cp | ||
1028 | wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/ | ||
1029 | wic ls --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/ | ||
1030 | wic rm --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/test.wic-cp | ||
1031 | wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/ | ||
1032 | } | ||
1033 | """ | ||
1034 | self.write_recipeinc('images', image_recipe_append) | ||
1035 | |||
916 | self.append_config(config) | 1036 | self.append_config(config) |
917 | image = 'wic-image-minimal' | 1037 | image = 'wic-image-minimal' |
918 | bitbake(image) | 1038 | bitbake(image) |
@@ -921,6 +1041,11 @@ class Wic2(WicTestCase): | |||
921 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) | 1041 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image) |
922 | prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME']) | 1042 | prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME']) |
923 | 1043 | ||
1044 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
1045 | # check if file is there | ||
1046 | result = runCmd("wic ls %s:1/ -n %s" % (prefix+"wic", sysroot)) | ||
1047 | self.assertIn("test.wic-cp", result.output) | ||
1048 | |||
924 | # check if we have result image and manifests symlinks | 1049 | # check if we have result image and manifests symlinks |
925 | # pointing to existing files | 1050 | # pointing to existing files |
926 | for suffix in ('wic', 'manifest'): | 1051 | for suffix in ('wic', 'manifest'): |
@@ -936,10 +1061,29 @@ class Wic2(WicTestCase): | |||
936 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ | 1061 | config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ |
937 | 'MACHINE_FEATURES:append = " efi"\n' | 1062 | 'MACHINE_FEATURES:append = " efi"\n' |
938 | self.append_config(config) | 1063 | self.append_config(config) |
1064 | image_recipe_append = """ | ||
1065 | do_image_wic[postfuncs] += "run_wic_cmd" | ||
1066 | run_wic_cmd() { | ||
1067 | echo "test" >> ${WORKDIR}/test.wic-cp | ||
1068 | wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/ | ||
1069 | wic ls --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/ | ||
1070 | wic rm --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/test.wic-cp | ||
1071 | wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/ | ||
1072 | } | ||
1073 | """ | ||
1074 | self.write_recipeinc('images', image_recipe_append) | ||
939 | bitbake('wic-image-minimal') | 1075 | bitbake('wic-image-minimal') |
1076 | |||
1077 | sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') | ||
1078 | bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], "wic-image-minimal") | ||
1079 | image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME']) | ||
1080 | # check if file is there | ||
1081 | result = runCmd("wic ls %s:1/ -n %s" % (image_path+".wic", sysroot)) | ||
1082 | self.assertIn("test.wic-cp", result.output) | ||
940 | self.remove_config(config) | 1083 | self.remove_config(config) |
941 | 1084 | ||
942 | with runqemu('wic-image-minimal', ssh=False, runqemuparams='nographic') as qemu: | 1085 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'wic-image-minimal') or "" |
1086 | with runqemu('wic-image-minimal', ssh=False, runqemuparams='%s nographic' % (runqemu_params)) as qemu: | ||
943 | cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ | 1087 | cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ |
944 | "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" | 1088 | "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" |
945 | status, output = qemu.run_serial(cmd) | 1089 | status, output = qemu.run_serial(cmd) |
@@ -959,8 +1103,9 @@ class Wic2(WicTestCase): | |||
959 | bitbake('core-image-minimal ovmf') | 1103 | bitbake('core-image-minimal ovmf') |
960 | self.remove_config(config) | 1104 | self.remove_config(config) |
961 | 1105 | ||
1106 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or "" | ||
962 | with runqemu('core-image-minimal', ssh=False, | 1107 | with runqemu('core-image-minimal', ssh=False, |
963 | runqemuparams='nographic ovmf', image_fstype='wic') as qemu: | 1108 | runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu: |
964 | cmd = "grep sda. /proc/partitions |wc -l" | 1109 | cmd = "grep sda. /proc/partitions |wc -l" |
965 | status, output = qemu.run_serial(cmd) | 1110 | status, output = qemu.run_serial(cmd) |
966 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1111 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
@@ -1000,8 +1145,8 @@ class Wic2(WicTestCase): | |||
1000 | native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools") | 1145 | native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools") |
1001 | 1146 | ||
1002 | # verify partition size with wic | 1147 | # verify partition size with wic |
1003 | res = runCmd("parted -m %s unit kib p 2>/dev/null" % wicimg, | 1148 | res = runCmd("parted -m %s unit kib p" % wicimg, |
1004 | native_sysroot=native_sysroot) | 1149 | native_sysroot=native_sysroot, stderr=subprocess.PIPE) |
1005 | 1150 | ||
1006 | # parse parted output which looks like this: | 1151 | # parse parted output which looks like this: |
1007 | # BYT;\n | 1152 | # BYT;\n |
@@ -1040,71 +1185,71 @@ class Wic2(WicTestCase): | |||
1040 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1185 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1041 | # Test that partitions are placed at the correct offsets, default KB | 1186 | # Test that partitions are placed at the correct offsets, default KB |
1042 | tempf.write("bootloader --ptable gpt\n" \ | 1187 | tempf.write("bootloader --ptable gpt\n" \ |
1043 | "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \ | 1188 | "part / --source rootfs --ondisk hda --offset 32 --fixed-size 200M --fstype=ext4\n" \ |
1044 | "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n") | 1189 | "part /bar --ondisk hda --offset 204832 --fixed-size 100M --fstype=ext4\n") |
1045 | tempf.flush() | 1190 | tempf.flush() |
1046 | 1191 | ||
1047 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) | 1192 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) |
1048 | self.assertEqual(partlns, [ | 1193 | self.assertEqual(partlns, [ |
1049 | "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", | 1194 | "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;", |
1050 | "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", | 1195 | "2:204832kiB:307232kiB:102400kiB:ext4:primary:;", |
1051 | ]) | 1196 | ]) |
1052 | 1197 | ||
1053 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1198 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1054 | # Test that partitions are placed at the correct offsets, same with explicit KB | 1199 | # Test that partitions are placed at the correct offsets, same with explicit KB |
1055 | tempf.write("bootloader --ptable gpt\n" \ | 1200 | tempf.write("bootloader --ptable gpt\n" \ |
1056 | "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \ | 1201 | "part / --source rootfs --ondisk hda --offset 32K --fixed-size 200M --fstype=ext4\n" \ |
1057 | "part /bar --ondisk hda --offset 102432K --fixed-size 100M --fstype=ext4\n") | 1202 | "part /bar --ondisk hda --offset 204832K --fixed-size 100M --fstype=ext4\n") |
1058 | tempf.flush() | 1203 | tempf.flush() |
1059 | 1204 | ||
1060 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) | 1205 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) |
1061 | self.assertEqual(partlns, [ | 1206 | self.assertEqual(partlns, [ |
1062 | "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", | 1207 | "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;", |
1063 | "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", | 1208 | "2:204832kiB:307232kiB:102400kiB:ext4:primary:;", |
1064 | ]) | 1209 | ]) |
1065 | 1210 | ||
1066 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1211 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1067 | # Test that partitions are placed at the correct offsets using MB | 1212 | # Test that partitions are placed at the correct offsets using MB |
1068 | tempf.write("bootloader --ptable gpt\n" \ | 1213 | tempf.write("bootloader --ptable gpt\n" \ |
1069 | "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \ | 1214 | "part / --source rootfs --ondisk hda --offset 32K --fixed-size 200M --fstype=ext4\n" \ |
1070 | "part /bar --ondisk hda --offset 101M --fixed-size 100M --fstype=ext4\n") | 1215 | "part /bar --ondisk hda --offset 201M --fixed-size 100M --fstype=ext4\n") |
1071 | tempf.flush() | 1216 | tempf.flush() |
1072 | 1217 | ||
1073 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) | 1218 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) |
1074 | self.assertEqual(partlns, [ | 1219 | self.assertEqual(partlns, [ |
1075 | "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", | 1220 | "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;", |
1076 | "2:103424kiB:205824kiB:102400kiB:ext4:primary:;", | 1221 | "2:205824kiB:308224kiB:102400kiB:ext4:primary:;", |
1077 | ]) | 1222 | ]) |
1078 | 1223 | ||
1079 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1224 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1080 | # Test that partitions can be placed on a 512 byte sector boundary | 1225 | # Test that partitions can be placed on a 512 byte sector boundary |
1081 | tempf.write("bootloader --ptable gpt\n" \ | 1226 | tempf.write("bootloader --ptable gpt\n" \ |
1082 | "part / --source rootfs --ondisk hda --offset 65s --fixed-size 99M --fstype=ext4\n" \ | 1227 | "part / --source rootfs --ondisk hda --offset 65s --fixed-size 199M --fstype=ext4\n" \ |
1083 | "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n") | 1228 | "part /bar --ondisk hda --offset 204832 --fixed-size 100M --fstype=ext4\n") |
1084 | tempf.flush() | 1229 | tempf.flush() |
1085 | 1230 | ||
1086 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) | 1231 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) |
1087 | self.assertEqual(partlns, [ | 1232 | self.assertEqual(partlns, [ |
1088 | "1:32.5kiB:101408kiB:101376kiB:ext4:primary:;", | 1233 | "1:32.5kiB:203808kiB:203776kiB:ext4:primary:;", |
1089 | "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", | 1234 | "2:204832kiB:307232kiB:102400kiB:ext4:primary:;", |
1090 | ]) | 1235 | ]) |
1091 | 1236 | ||
1092 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1237 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1093 | # Test that a partition can be placed immediately after a MSDOS partition table | 1238 | # Test that a partition can be placed immediately after a MSDOS partition table |
1094 | tempf.write("bootloader --ptable msdos\n" \ | 1239 | tempf.write("bootloader --ptable msdos\n" \ |
1095 | "part / --source rootfs --ondisk hda --offset 1s --fixed-size 100M --fstype=ext4\n") | 1240 | "part / --source rootfs --ondisk hda --offset 1s --fixed-size 200M --fstype=ext4\n") |
1096 | tempf.flush() | 1241 | tempf.flush() |
1097 | 1242 | ||
1098 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) | 1243 | _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) |
1099 | self.assertEqual(partlns, [ | 1244 | self.assertEqual(partlns, [ |
1100 | "1:0.50kiB:102400kiB:102400kiB:ext4::;", | 1245 | "1:0.50kiB:204800kiB:204800kiB:ext4::;", |
1101 | ]) | 1246 | ]) |
1102 | 1247 | ||
1103 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1248 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1104 | # Test that image creation fails if the partitions would overlap | 1249 | # Test that image creation fails if the partitions would overlap |
1105 | tempf.write("bootloader --ptable gpt\n" \ | 1250 | tempf.write("bootloader --ptable gpt\n" \ |
1106 | "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \ | 1251 | "part / --source rootfs --ondisk hda --offset 32 --fixed-size 200M --fstype=ext4\n" \ |
1107 | "part /bar --ondisk hda --offset 102431 --fixed-size 100M --fstype=ext4\n") | 1252 | "part /bar --ondisk hda --offset 204831 --fixed-size 100M --fstype=ext4\n") |
1108 | tempf.flush() | 1253 | tempf.flush() |
1109 | 1254 | ||
1110 | p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) | 1255 | p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) |
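The updated expectations follow directly from the wks arithmetic: a 200M (204800 kiB) first partition starting at offset 32 kiB ends at 204832 kiB, which is exactly where the second partition is placed, and a further 100M puts its end at 307232 kiB (the overlap case then moves that start down to 204831 to force a failure). A quick check of the numbers:

    offset_kib = 32
    first_size_kib = 200 * 1024                    # --fixed-size 200M
    first_end_kib = offset_kib + first_size_kib
    assert first_end_kib == 204832                 # second partition's --offset
    second_end_kib = first_end_kib + 100 * 1024    # --fixed-size 100M
    assert second_end_kib == 307232                # "2:204832kiB:307232kiB:..."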
@@ -1113,7 +1258,7 @@ class Wic2(WicTestCase): | |||
1113 | with NamedTemporaryFile("w", suffix=".wks") as tempf: | 1258 | with NamedTemporaryFile("w", suffix=".wks") as tempf: |
1114 | # Test that partitions are not allowed to overlap with the bootloader | 1259 | # Test that partitions are not allowed to overlap with the bootloader |
1115 | tempf.write("bootloader --ptable gpt\n" \ | 1260 | tempf.write("bootloader --ptable gpt\n" \ |
1116 | "part / --source rootfs --ondisk hda --offset 8 --fixed-size 100M --fstype=ext4\n") | 1261 | "part / --source rootfs --ondisk hda --offset 8 --fixed-size 200M --fstype=ext4\n") |
1117 | tempf.flush() | 1262 | tempf.flush() |
1118 | 1263 | ||
1119 | p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) | 1264 | p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) |
@@ -1154,8 +1299,9 @@ class Wic2(WicTestCase): | |||
1154 | bitbake('core-image-minimal-mtdutils') | 1299 | bitbake('core-image-minimal-mtdutils') |
1155 | self.remove_config(config) | 1300 | self.remove_config(config) |
1156 | 1301 | ||
1302 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal-mtdutils') or "" | ||
1157 | with runqemu('core-image-minimal-mtdutils', ssh=False, | 1303 | with runqemu('core-image-minimal-mtdutils', ssh=False, |
1158 | runqemuparams='nographic', image_fstype='wic') as qemu: | 1304 | runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu: |
1159 | cmd = "grep sda. /proc/partitions |wc -l" | 1305 | cmd = "grep sda. /proc/partitions |wc -l" |
1160 | status, output = qemu.run_serial(cmd) | 1306 | status, output = qemu.run_serial(cmd) |
1161 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1307 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
@@ -1177,6 +1323,10 @@ class Wic2(WicTestCase): | |||
1177 | self.assertEqual(1, len(out)) | 1323 | self.assertEqual(1, len(out)) |
1178 | 1324 | ||
1179 | def test_rawcopy_plugin(self): | 1325 | def test_rawcopy_plugin(self): |
1326 | config = 'IMAGE_FSTYPES = "ext4"\n' | ||
1327 | self.append_config(config) | ||
1328 | self.assertEqual(0, bitbake('core-image-minimal').status) | ||
1329 | self.remove_config(config) | ||
1180 | self._rawcopy_plugin('ext4') | 1330 | self._rawcopy_plugin('ext4') |
1181 | 1331 | ||
1182 | def test_rawcopy_plugin_unpack(self): | 1332 | def test_rawcopy_plugin_unpack(self): |
@@ -1214,8 +1364,9 @@ class Wic2(WicTestCase): | |||
1214 | bitbake('core-image-minimal') | 1364 | bitbake('core-image-minimal') |
1215 | self.remove_config(config) | 1365 | self.remove_config(config) |
1216 | 1366 | ||
1367 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or "" | ||
1217 | with runqemu('core-image-minimal', ssh=False, | 1368 | with runqemu('core-image-minimal', ssh=False, |
1218 | runqemuparams='nographic', image_fstype='wic') as qemu: | 1369 | runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu: |
1219 | # Check that we have ONLY two /dev/sda* partitions (/boot and /) | 1370 | # Check that we have ONLY two /dev/sda* partitions (/boot and /) |
1220 | cmd = "grep sda. /proc/partitions | wc -l" | 1371 | cmd = "grep sda. /proc/partitions | wc -l" |
1221 | status, output = qemu.run_serial(cmd) | 1372 | status, output = qemu.run_serial(cmd) |
@@ -1242,7 +1393,7 @@ class Wic2(WicTestCase): | |||
1242 | def test_biosplusefi_plugin(self): | 1393 | def test_biosplusefi_plugin(self): |
1243 | """Test biosplusefi plugin""" | 1394 | """Test biosplusefi plugin""" |
1244 | # Wic generation below may fail depending on the order of the unittests | 1395 | # Wic generation below may fail depending on the order of the unittests |
1245 | # This is because bootimg-pcbios (that bootimg-biosplusefi uses) generates its MBR inside the STAGING_DATADIR directory | 1396 | # This is because bootimg_pcbios (that bootimg_biosplusefi uses) generates its MBR inside the STAGING_DATADIR directory |
1246 | # which may or may not exist depending on what was built already | 1397 | # which may or may not exist depending on what was built already |
1247 | # If an image hasn't been built yet, the directory ${STAGING_DATADIR}/syslinux won't exist and _get_bootimg_dir() | 1398 | # If an image hasn't been built yet, the directory ${STAGING_DATADIR}/syslinux won't exist and _get_bootimg_dir() |
1248 | # will raise with "Couldn't find correct bootimg_dir" | 1399 | # will raise with "Couldn't find correct bootimg_dir" |
@@ -1254,7 +1405,7 @@ class Wic2(WicTestCase): | |||
1254 | 1405 | ||
1255 | img = 'core-image-minimal' | 1406 | img = 'core-image-minimal' |
1256 | with NamedTemporaryFile("w", suffix=".wks") as wks: | 1407 | with NamedTemporaryFile("w", suffix=".wks") as wks: |
1257 | wks.writelines(['part /boot --active --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\n', | 1408 | wks.writelines(['part /boot --active --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\n', |
1258 | 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ | 1409 | 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ |
1259 | 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) | 1410 | 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) |
1260 | wks.flush() | 1411 | wks.flush() |
@@ -1274,7 +1425,7 @@ class Wic2(WicTestCase): | |||
1274 | 1425 | ||
1275 | img = 'core-image-minimal' | 1426 | img = 'core-image-minimal' |
1276 | with NamedTemporaryFile("w", suffix=".wks") as wks: | 1427 | with NamedTemporaryFile("w", suffix=".wks") as wks: |
1277 | wks.writelines(['part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"\n' | 1428 | wks.writelines(['part /boot --source bootimg_efi --sourceparams="loader=uefi-kernel"\n' |
1278 | 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ | 1429 | 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ |
1279 | 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) | 1430 | 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) |
1280 | wks.flush() | 1431 | wks.flush() |
@@ -1288,24 +1439,45 @@ class Wic2(WicTestCase): | |||
1288 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | 1439 | @skipIfNotArch(['i586', 'i686', 'x86_64']) |
1289 | @OETestTag("runqemu") | 1440 | @OETestTag("runqemu") |
1290 | def test_efi_plugin_unified_kernel_image_qemu(self): | 1441 | def test_efi_plugin_unified_kernel_image_qemu(self): |
1291 | """Test efi plugin's Unified Kernel Image feature in qemu""" | 1442 | """Test Unified Kernel Image feature in qemu without systemd in initramfs or rootfs""" |
1292 | config = 'IMAGE_FSTYPES = "wic"\n'\ | 1443 | config = """ |
1293 | 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ | 1444 | # efi firmware must load systemd-boot, not grub |
1294 | 'WKS_FILE = "test_efi_plugin.wks"\n'\ | 1445 | EFI_PROVIDER = "systemd-boot" |
1295 | 'MACHINE_FEATURES:append = " efi"\n' | 1446 | |
1447 | # image format must be wic, needs esp partition for firmware etc | ||
1448 | IMAGE_FSTYPES:pn-core-image-base:append = " wic" | ||
1449 | WKS_FILE = "test_efi_plugin.wks" | ||
1450 | |||
1451 | # efi, uki and systemd features must be enabled | ||
1452 | MACHINE_FEATURES:append = " efi" | ||
1453 | IMAGE_CLASSES:append:pn-core-image-base = " uki" | ||
1454 | |||
1455 | # uki embeds also an initrd, no systemd or udev | ||
1456 | INITRAMFS_IMAGE = "core-image-initramfs-boot" | ||
1457 | |||
1458 | # runqemu must not load kernel separately, it's in the uki | ||
1459 | QB_KERNEL_ROOT = "" | ||
1460 | QB_DEFAULT_KERNEL = "none" | ||
1461 | |||
1462 | # boot command line provided via uki, not via bootloader | ||
1463 | UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}" | ||
1464 | |||
1465 | """ | ||
1296 | self.append_config(config) | 1466 | self.append_config(config) |
1297 | bitbake('core-image-minimal core-image-minimal-initramfs ovmf') | 1467 | bitbake('core-image-base ovmf') |
1468 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or "" | ||
1469 | uki_filename = get_bb_var('UKI_FILENAME', 'core-image-base') | ||
1298 | self.remove_config(config) | 1470 | self.remove_config(config) |
1299 | 1471 | ||
1300 | with runqemu('core-image-minimal', ssh=False, | 1472 | with runqemu('core-image-base', ssh=False, |
1301 | runqemuparams='nographic ovmf', image_fstype='wic') as qemu: | 1473 | runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu: |
1302 | # Check that /boot has EFI bootx64.efi (required for EFI) | 1474 | # Check that /boot has EFI boot*.efi (required for EFI) |
1303 | cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l" | 1475 | cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l" |
1304 | status, output = qemu.run_serial(cmd) | 1476 | status, output = qemu.run_serial(cmd) |
1305 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1477 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
1306 | self.assertEqual(output, '1') | 1478 | self.assertEqual(output, '1') |
1307 | # Check that /boot has EFI/Linux/linux.efi (required for Unified Kernel Images auto detection) | 1479 | # Check that /boot has EFI/Linux/${UKI_FILENAME} (required for Unified Kernel Images auto detection) |
1308 | cmd = "ls /boot/EFI/Linux/linux.efi | wc -l" | 1480 | cmd = "ls /boot/EFI/Linux/%s | wc -l" % (uki_filename) |
1309 | status, output = qemu.run_serial(cmd) | 1481 | status, output = qemu.run_serial(cmd) |
1310 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1482 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
1311 | self.assertEqual(output, '1') | 1483 | self.assertEqual(output, '1') |
@@ -1315,6 +1487,80 @@ class Wic2(WicTestCase): | |||
1315 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1487 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
1316 | self.assertEqual(output, '0') | 1488 | self.assertEqual(output, '0') |
1317 | 1489 | ||
1490 | @skipIfNotArch(['aarch64']) | ||
1491 | @OETestTag("runqemu") | ||
1492 | def test_efi_plugin_plain_systemd_boot_qemu_aarch64(self): | ||
1493 | """Test plain systemd-boot in qemu with systemd""" | ||
1494 | config = """ | ||
1495 | INIT_MANAGER = "systemd" | ||
1496 | EFI_PROVIDER = "systemd-boot" | ||
1497 | |||
1498 | # image format must be wic, needs esp partition for firmware etc | ||
1499 | IMAGE_FSTYPES:pn-core-image-base:append = " wic" | ||
1500 | WKS_FILE = "test_efi_plugin_plain_systemd-boot.wks" | ||
1501 | |||
1502 | INITRAMFS_IMAGE = "core-image-initramfs-boot" | ||
1503 | |||
1504 | # to configure runqemu | ||
1505 | IMAGE_CLASSES += "qemuboot" | ||
1506 | # u-boot efi firmware | ||
1507 | QB_DEFAULT_BIOS = "u-boot.bin" | ||
1508 | # need to use virtio, scsi not supported by u-boot by default | ||
1509 | QB_DRIVE_TYPE = "/dev/vd" | ||
1510 | |||
1511 | # disable kvm, breaks boot | ||
1512 | QEMU_USE_KVM = "" | ||
1513 | |||
1514 | IMAGE_CLASSES:remove = 'testimage' | ||
1515 | """ | ||
1516 | self.append_config(config) | ||
1517 | bitbake('core-image-base u-boot') | ||
1518 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or "" | ||
1519 | |||
1520 | with runqemu('core-image-base', ssh=False, | ||
1521 | runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu: | ||
1522 | # Check that /boot has EFI boot*.efi (required for EFI) | ||
1523 | cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l" | ||
1524 | status, output = qemu.run_serial(cmd) | ||
1525 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1526 | self.assertEqual(output, '1') | ||
1527 | # Check that boot.conf exists | ||
1528 | cmd = "cat /boot/loader/entries/boot.conf" | ||
1529 | status, output = qemu.run_serial(cmd) | ||
1530 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1531 | self.remove_config(config) | ||
1532 | |||
1533 | @skipIfNotArch(['i586', 'i686', 'x86_64']) | ||
1534 | @OETestTag("runqemu") | ||
1535 | def test_efi_plugin_plain_systemd_boot_qemu_x86(self): | ||
1536 | """Test plain systemd-boot to systemd in qemu""" | ||
1537 | config = """ | ||
1538 | INIT_MANAGER = "systemd" | ||
1539 | EFI_PROVIDER = "systemd-boot" | ||
1540 | |||
1541 | # image format must be wic, needs esp partition for firmware etc | ||
1542 | IMAGE_FSTYPES:pn-core-image-base:append = " wic" | ||
1543 | WKS_FILE = "test_efi_plugin_plain_systemd-boot.wks" | ||
1544 | |||
1545 | INITRAMFS_IMAGE = "core-image-initramfs-boot" | ||
1546 | """ | ||
1547 | self.append_config(config) | ||
1548 | bitbake('core-image-base ovmf') | ||
1549 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or "" | ||
1550 | self.remove_config(config) | ||
1551 | |||
1552 | with runqemu('core-image-base', ssh=False, | ||
1553 | runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu: | ||
1554 | # Check that /boot has EFI boot*.efi (required for EFI) | ||
1555 | cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l" | ||
1556 | status, output = qemu.run_serial(cmd) | ||
1557 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1558 | self.assertEqual(output, '1') | ||
1559 | # Check that boot.conf exists | ||
1560 | cmd = "cat /boot/loader/entries/boot.conf" | ||
1561 | status, output = qemu.run_serial(cmd) | ||
1562 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | ||
1563 | |||
1318 | def test_fs_types(self): | 1564 | def test_fs_types(self): |
1319 | """Test filesystem types for empty and not empty partitions""" | 1565 | """Test filesystem types for empty and not empty partitions""" |
1320 | img = 'core-image-minimal' | 1566 | img = 'core-image-minimal' |
@@ -1446,8 +1692,8 @@ class Wic2(WicTestCase): | |||
1446 | os.rename(image_path, image_path + '.bak') | 1692 | os.rename(image_path, image_path + '.bak') |
1447 | os.rename(new_image_path, image_path) | 1693 | os.rename(new_image_path, image_path) |
1448 | 1694 | ||
1449 | # Check if it boots in qemu | 1695 | runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or "" |
1450 | with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu: | 1696 | with runqemu('core-image-minimal', ssh=False, runqemuparams='%s nographic' % (runqemu_params)) as qemu: |
1451 | cmd = "ls /etc/" | 1697 | cmd = "ls /etc/" |
1452 | status, output = qemu.run_serial('true') | 1698 | status, output = qemu.run_serial('true') |
1453 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 1699 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py index 99186175e5..16f82c6737 100644 --- a/meta/lib/oeqa/selftest/context.py +++ b/meta/lib/oeqa/selftest/context.py | |||
@@ -102,6 +102,13 @@ class OESelftestTestContext(OETestContext): | |||
102 | oe.path.copytree(builddir + "/cache", newbuilddir + "/cache") | 102 | oe.path.copytree(builddir + "/cache", newbuilddir + "/cache") |
103 | oe.path.copytree(selftestdir, newselftestdir) | 103 | oe.path.copytree(selftestdir, newselftestdir) |
104 | 104 | ||
105 | # if the last line of local.conf in newbuilddir is not empty and does not end with a newline, add one | ||
106 | localconf_path = newbuilddir + "/conf/local.conf" | ||
107 | with open(localconf_path, "r+", encoding="utf-8") as f: | ||
108 | last_line = f.readlines()[-1] | ||
109 | if last_line and not last_line.endswith("\n"): | ||
110 | f.write("\n") | ||
111 | |||
105 | subprocess.check_output("git init && git add * && git commit -a -m 'initial'", cwd=newselftestdir, shell=True) | 112 | subprocess.check_output("git init && git add * && git commit -a -m 'initial'", cwd=newselftestdir, shell=True) |
106 | 113 | ||
107 | # Tried to use bitbake-layers add/remove but it requires recipe parsing and hence is too slow | 114 | # Tried to use bitbake-layers add/remove but it requires recipe parsing and hence is too slow |
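A minimal sketch of the local.conf fix above as a standalone helper (the name and factoring are assumptions, not upstream API): opening with "r+" and calling readlines() leaves the file position at end-of-file, so the subsequent write() appends.

    def ensure_trailing_newline(path):
        # If the last line lacks a newline, a later append such as
        # f.write(extra_config) would otherwise merge into it
        with open(path, "r+", encoding="utf-8") as f:
            lines = f.readlines()   # read everything; position is now EOF
            if lines and not lines[-1].endswith("\n"):
                f.write("\n")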
@@ -114,11 +121,15 @@ class OESelftestTestContext(OETestContext): | |||
114 | bblayers_abspath = [os.path.abspath(path) for path in bblayers.split()] | 121 | bblayers_abspath = [os.path.abspath(path) for path in bblayers.split()] |
115 | with open("%s/conf/bblayers.conf" % newbuilddir, "a") as f: | 122 | with open("%s/conf/bblayers.conf" % newbuilddir, "a") as f: |
116 | newbblayers = "# new bblayers to be used by selftest in the new build dir '%s'\n" % newbuilddir | 123 | newbblayers = "# new bblayers to be used by selftest in the new build dir '%s'\n" % newbuilddir |
124 | newbblayers += 'unset BBLAYERS\n' | ||
117 | newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath) | 125 | newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath) |
118 | f.write(newbblayers) | 126 | f.write(newbblayers) |
119 | 127 | ||
128 | # Rewrite builddir paths seen in environment variables | ||
120 | for e in os.environ: | 129 | for e in os.environ: |
121 | if builddir + "/" in os.environ[e]: | 130 | # Rewrite paths that absolutely point inside builddir |
131 | # (e.g $builddir/conf/ would be rewritten but not $builddir/../bitbake/) | ||
132 | if builddir + "/" in os.environ[e] and builddir + "/" in os.path.abspath(os.environ[e]): | ||
122 | os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") | 133 | os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") |
123 | if os.environ[e].endswith(builddir): | 134 | if os.environ[e].endswith(builddir): |
124 | os.environ[e] = os.environ[e].replace(builddir, newbuilddir) | 135 | os.environ[e] = os.environ[e].replace(builddir, newbuilddir) |
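An illustration, with made-up paths, of why the environment rewrite above checks both the raw value and its abspath: a value that only reaches inside builddir via '..' should keep pointing at the original tree.

    import os.path

    builddir = "/work/build"                        # hypothetical paths
    inside   = "/work/build/conf/local.conf"
    outside  = "/work/build/../bitbake/bin"

    builddir + "/" in inside                        # True  -> should be rewritten
    builddir + "/" in outside                       # True  -> substring test alone is fooled
    builddir + "/" in os.path.abspath(outside)      # False -> abspath is "/work/bitbake/bin"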
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py index 6e8b781973..cdf382ee21 100644 --- a/meta/lib/oeqa/targetcontrol.py +++ b/meta/lib/oeqa/targetcontrol.py | |||
@@ -88,7 +88,7 @@ class QemuTarget(BaseTarget): | |||
88 | 88 | ||
89 | supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] | 89 | supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] |
90 | 90 | ||
91 | def __init__(self, d, logger, image_fstype=None): | 91 | def __init__(self, d, logger, image_fstype=None, boot_patterns=None): |
92 | 92 | ||
93 | import oe.types | 93 | import oe.types |
94 | 94 | ||
@@ -141,7 +141,8 @@ class QemuTarget(BaseTarget): | |||
141 | dump_dir = dump_dir, | 141 | dump_dir = dump_dir, |
142 | logger = logger, | 142 | logger = logger, |
143 | tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"), | 143 | tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"), |
144 | serial_ports = len(d.getVar("SERIAL_CONSOLES").split())) | 144 | serial_ports = len(d.getVar("SERIAL_CONSOLES").split()), |
145 | boot_patterns = boot_patterns) | ||
145 | 146 | ||
146 | self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner) | 147 | self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner) |
147 | if (self.monitor_dumper): | 148 | if (self.monitor_dumper): |
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py index 53bdcbf266..e03f7e33bb 100644 --- a/meta/lib/oeqa/utils/__init__.py +++ b/meta/lib/oeqa/utils/__init__.py | |||
@@ -96,4 +96,10 @@ def get_json_result_dir(d): | |||
96 | custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR") | 96 | custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR") |
97 | if custom_json_result_dir: | 97 | if custom_json_result_dir: |
98 | json_result_dir = custom_json_result_dir | 98 | json_result_dir = custom_json_result_dir |
99 | return json_result_dir \ No newline at end of file | 99 | return json_result_dir |
100 | |||
101 | def get_artefact_dir(d): | ||
102 | custom_json_result_dir = d.getVar("OEQA_ARTEFACT_DIR") | ||
103 | if custom_json_result_dir: | ||
104 | return custom_json_result_dir | ||
105 | return os.path.join(d.getVar("LOG_DIR"), 'oeqa-artefacts') | ||
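A usage sketch for the new get_artefact_dir() helper, with illustrative datastore values: OEQA_ARTEFACT_DIR wins when set, otherwise artefacts land under ${LOG_DIR}/oeqa-artefacts.

    d.setVar("LOG_DIR", "/work/build/tmp/log")          # hypothetical values
    get_artefact_dir(d)     # -> "/work/build/tmp/log/oeqa-artefacts"

    d.setVar("OEQA_ARTEFACT_DIR", "/srv/qa-artefacts")
    get_artefact_dir(d)     # -> "/srv/qa-artefacts"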
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py index 575e380017..b60a6e6c38 100644 --- a/meta/lib/oeqa/utils/commands.py +++ b/meta/lib/oeqa/utils/commands.py | |||
@@ -203,6 +203,8 @@ def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=T | |||
203 | 203 | ||
204 | if result.status and not ignore_status: | 204 | if result.status and not ignore_status: |
205 | exc_output = result.output | 205 | exc_output = result.output |
206 | if result.error: | ||
207 | exc_output = exc_output + result.error | ||
206 | if limit_exc_output > 0: | 208 | if limit_exc_output > 0: |
207 | split = result.output.splitlines() | 209 | split = result.output.splitlines() |
208 | if len(split) > limit_exc_output: | 210 | if len(split) > limit_exc_output: |
@@ -283,7 +285,20 @@ def get_bb_vars(variables=None, target=None, postconfig=None): | |||
283 | return values | 285 | return values |
284 | 286 | ||
285 | def get_bb_var(var, target=None, postconfig=None): | 287 | def get_bb_var(var, target=None, postconfig=None): |
286 | return get_bb_vars([var], target, postconfig)[var] | 288 | if postconfig: |
289 | return get_bb_vars([var], target, postconfig)[var] | ||
290 | else: | ||
291 | # Fast-path for the non-postconfig case | ||
292 | cmd = ["bitbake-getvar", "--quiet", "--value", var] | ||
293 | if target: | ||
294 | cmd.extend(["--recipe", target]) | ||
295 | try: | ||
296 | return subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE).stdout.strip() | ||
297 | except subprocess.CalledProcessError as e: | ||
298 | # We need to return None not the empty string if the variable hasn't been set. | ||
299 | if e.returncode == 1: | ||
300 | return None | ||
301 | raise | ||
287 | 302 | ||
288 | def get_test_layer(bblayers=None): | 303 | def get_test_layer(bblayers=None): |
289 | if bblayers is None: | 304 | if bblayers is None: |
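A hedged usage sketch of the bitbake-getvar fast path above (variable names and values illustrative). Per the code, bitbake-getvar exiting with status 1 is mapped to None so callers can distinguish an unset variable from an empty one.

    machine = get_bb_var("MACHINE")                         # e.g. "qemux86-64"
    pv      = get_bb_var("PV", target="core-image-minimal")
    missing = get_bb_var("SOME_UNSET_VARIABLE")             # -> None, not ''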
@@ -312,9 +327,26 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec= | |||
312 | f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames)) | 327 | f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames)) |
313 | 328 | ||
314 | @contextlib.contextmanager | 329 | @contextlib.contextmanager |
315 | def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): | 330 | def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, boot_patterns = {}, discard_writes=True): |
316 | """ | 331 | """ |
317 | launch_cmd means directly run the command, don't need set rootfs or env vars. | 332 | Starts a context manager for a 'oeqa.targetcontrol.QemuTarget' resource. |
333 | The underlying Qemu will be booted into a shell when the generator yields | ||
334 | and stopped when the 'with' block exits. | ||
335 | |||
336 | Usage: | ||
337 | |||
338 | with runqemu('core-image-minimal') as qemu: | ||
339 | qemu.run_serial('cat /proc/cpuinfo') | ||
340 | |||
341 | Args: | ||
342 | pn (str): (image) recipe to run on | ||
343 | ssh (boolean): whether or not to enable SSH (network access) | ||
344 | runqemuparams (str): space-separated list of params to pass to 'runqemu' script (like 'nographic', 'ovmf', etc.) | ||
345 | image_fstype (str): IMAGE_FSTYPE to use | ||
346 | launch_cmd (str): directly run this command and bypass automatic runqemu parameter generation | ||
347 | overrides (dict): dict of "'<bitbake-variable>': value" pairs that allows overriding bitbake variables | ||
348 | boot_patterns (dict): dict of "'<pattern-name>': value" pairs to override default boot patterns, e.g. when not booting Linux | ||
349 | discard_writes (boolean): enables qemu -snapshot feature to prevent modifying original image | ||
318 | """ | 350 | """ |
319 | 351 | ||
320 | import bb.tinfoil | 352 | import bb.tinfoil |
@@ -345,7 +377,7 @@ def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, | |||
345 | 377 | ||
346 | logdir = recipedata.getVar("TEST_LOG_DIR") | 378 | logdir = recipedata.getVar("TEST_LOG_DIR") |
347 | 379 | ||
348 | qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype) | 380 | qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype, boot_patterns=boot_patterns) |
349 | finally: | 381 | finally: |
350 | # We need to shut down tinfoil early here in case we actually want | 382 | # We need to shut down tinfoil early here in case we actually want |
351 | # to run tinfoil-using utilities with the running QEMU instance. | 383 | # to run tinfoil-using utilities with the running QEMU instance. |
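A sketch of the new boot_patterns argument flowing through runqemu() to QemuTarget and QemuRunner; the keys mirror the TESTIMAGE_BOOT_PATTERNS names used by qemurunner.py, while the prompt strings here are assumptions for a non-default image.

    patterns = {
        'search_reached_prompt': 'myimage login:',   # hypothetical login prompt
        'send_login_user': 'root\n',
    }
    with runqemu('core-image-minimal', ssh=False, boot_patterns=patterns) as qemu:
        status, output = qemu.run_serial('uname -a')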
diff --git a/meta/lib/oeqa/utils/gitarchive.py b/meta/lib/oeqa/utils/gitarchive.py index 10cb267dfa..7e1d505748 100644 --- a/meta/lib/oeqa/utils/gitarchive.py +++ b/meta/lib/oeqa/utils/gitarchive.py | |||
@@ -67,7 +67,7 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log): | |||
67 | 67 | ||
68 | # Remove files that are excluded | 68 | # Remove files that are excluded |
69 | if exclude: | 69 | if exclude: |
70 | repo.run_cmd(['rm', '--cached'] + [f for f in exclude], env_update) | 70 | repo.run_cmd(['rm', '--cached', '--ignore-unmatch'] + [f for f in exclude], env_update) |
71 | 71 | ||
72 | tree = repo.run_cmd('write-tree', env_update) | 72 | tree = repo.run_cmd('write-tree', env_update) |
73 | 73 | ||
@@ -146,7 +146,7 @@ def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern, | |||
146 | keyws['tag_number'] = '{tag_number}' | 146 | keyws['tag_number'] = '{tag_number}' |
147 | tag_re = format_str(name_pattern, keyws) | 147 | tag_re = format_str(name_pattern, keyws) |
148 | # Replace parentheses for proper regex matching | 148 | # Replace parentheses for proper regex matching |
149 | tag_re = tag_re.replace('(', '\(').replace(')', '\)') + '$' | 149 | tag_re = tag_re.replace('(', r'\(').replace(')', r'\)') + '$' |
150 | # Inject regex group pattern for 'tag_number' | 150 | # Inject regex group pattern for 'tag_number' |
151 | tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})') | 151 | tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})') |
152 | 152 | ||
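Context for the r'' change above: '\(' in an ordinary string literal is an invalid escape sequence (a SyntaxWarning on current Python), whereas the raw string keeps the backslash the regex engine needs. A small illustration with a made-up tag pattern:

    import re

    tag_re = "results({rev})".replace('(', r'\(').replace(')', r'\)') + '$'
    tag_re = tag_re.format(rev='(?P<rev>[0-9a-f]+)')
    re.match(tag_re, "results(abc123)").group('rev')    # -> 'abc123'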
@@ -202,6 +202,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms | |||
202 | log.info("Pushing data to remote") | 202 | log.info("Pushing data to remote") |
203 | data_repo.run_cmd(cmd) | 203 | data_repo.run_cmd(cmd) |
204 | 204 | ||
205 | return tag_name | ||
206 | |||
205 | # Container class for tester revisions | 207 | # Container class for tester revisions |
206 | TestedRev = namedtuple('TestedRev', 'commit commit_number tags') | 208 | TestedRev = namedtuple('TestedRev', 'commit commit_number tags') |
207 | 209 | ||
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py index 15ec190c4a..b320df67e0 100644 --- a/meta/lib/oeqa/utils/metadata.py +++ b/meta/lib/oeqa/utils/metadata.py | |||
@@ -76,6 +76,10 @@ def git_rev_info(path): | |||
76 | info['commit_count'] = int(subprocess.check_output(["git", "rev-list", "--count", "HEAD"], cwd=path).decode('utf-8').strip()) | 76 | info['commit_count'] = int(subprocess.check_output(["git", "rev-list", "--count", "HEAD"], cwd=path).decode('utf-8').strip()) |
77 | except subprocess.CalledProcessError: | 77 | except subprocess.CalledProcessError: |
78 | pass | 78 | pass |
79 | try: | ||
80 | info['commit_time'] = int(subprocess.check_output(["git", "show", "--no-patch", "--format=%ct", "HEAD"], cwd=path).decode('utf-8').strip()) | ||
81 | except subprocess.CalledProcessError: | ||
82 | pass | ||
79 | return info | 83 | return info |
80 | try: | 84 | try: |
81 | repo = Repo(path, search_parent_directories=True) | 85 | repo = Repo(path, search_parent_directories=True) |
@@ -83,6 +87,7 @@ def git_rev_info(path): | |||
83 | return info | 87 | return info |
84 | info['commit'] = repo.head.commit.hexsha | 88 | info['commit'] = repo.head.commit.hexsha |
85 | info['commit_count'] = repo.head.commit.count() | 89 | info['commit_count'] = repo.head.commit.count() |
90 | info['commit_time'] = repo.head.commit.committed_date | ||
86 | try: | 91 | try: |
87 | info['branch'] = repo.active_branch.name | 92 | info['branch'] = repo.active_branch.name |
88 | except TypeError: | 93 | except TypeError: |
diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py index ecdddd2d40..c69481db6c 100644 --- a/meta/lib/oeqa/utils/postactions.py +++ b/meta/lib/oeqa/utils/postactions.py | |||
@@ -7,23 +7,20 @@ | |||
7 | # Run a set of actions after tests. The runner provides internal data | 7 | # Run a set of actions after tests. The runner provides internal data |
8 | # dictionary as well as test context to any action to run. | 8 | # dictionary as well as test context to any action to run. |
9 | 9 | ||
10 | from oeqa.utils import get_json_result_dir | 10 | import datetime |
11 | 11 | import io | |
12 | def create_artifacts_directory(d, tc): | 12 | import os |
13 | import shutil | 13 | import stat |
14 | 14 | import subprocess | |
15 | local_artifacts_dir = os.path.join(get_json_result_dir(d), "artifacts") | 15 | import tempfile |
16 | if os.path.isdir(local_artifacts_dir): | 16 | from oeqa.utils import get_artefact_dir |
17 | shutil.rmtree(local_artifacts_dir) | ||
18 | |||
19 | os.makedirs(local_artifacts_dir) | ||
20 | 17 | ||
21 | ################################################################## | 18 | ################################################################## |
22 | # Host/target statistics | 19 | # Host/target statistics |
23 | ################################################################## | 20 | ################################################################## |
24 | 21 | ||
25 | def get_target_disk_usage(d, tc): | 22 | def get_target_disk_usage(d, tc, artifacts_list, outputdir): |
26 | output_file = os.path.join(get_json_result_dir(d), "artifacts", "target_disk_usage.txt") | 23 | output_file = os.path.join(outputdir, "target_disk_usage.txt") |
27 | try: | 24 | try: |
28 | (status, output) = tc.target.run('df -h') | 25 | (status, output) = tc.target.run('df -h') |
29 | with open(output_file, 'w') as f: | 26 | with open(output_file, 'w') as f: |
@@ -32,10 +29,10 @@ def get_target_disk_usage(d, tc): | |||
32 | except Exception as e: | 29 | except Exception as e: |
33 | bb.warn(f"Can not get target disk usage: {e}") | 30 | bb.warn(f"Can not get target disk usage: {e}") |
34 | 31 | ||
35 | def get_host_disk_usage(d, tc): | 32 | def get_host_disk_usage(d, tc, artifacts_list, outputdir): |
36 | import subprocess | 33 | import subprocess |
37 | 34 | ||
38 | output_file = os.path.join(get_json_result_dir(d), "artifacts", "host_disk_usage.txt") | 35 | output_file = os.path.join(outputdir, "host_disk_usage.txt") |
39 | try: | 36 | try: |
40 | with open(output_file, 'w') as f: | 37 | with open(output_file, 'w') as f: |
41 | output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={}) | 38 | output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={}) |
@@ -61,25 +58,22 @@ def get_artifacts_list(target, raw_list): | |||
61 | 58 | ||
62 | return result | 59 | return result |
63 | 60 | ||
64 | def retrieve_test_artifacts(target, artifacts_list, target_dir): | 61 | def list_and_fetch_failed_tests_artifacts(d, tc, artifacts_list, outputdir): |
65 | local_artifacts_dir = os.path.join(target_dir, "artifacts") | 62 | artifacts_list = get_artifacts_list(tc.target, artifacts_list) |
66 | for artifact_path in artifacts_list: | ||
67 | if not os.path.isabs(artifact_path): | ||
68 | bb.warn(f"{artifact_path} is not an absolute path") | ||
69 | continue | ||
70 | try: | ||
71 | dest_dir = os.path.join(local_artifacts_dir, os.path.dirname(artifact_path[1:])) | ||
72 | os.makedirs(dest_dir, exist_ok=True) | ||
73 | target.copyFrom(artifact_path, dest_dir) | ||
74 | except Exception as e: | ||
75 | bb.warn(f"Can not retrieve {artifact_path} from test target: {e}") | ||
76 | |||
77 | def list_and_fetch_failed_tests_artifacts(d, tc): | ||
78 | artifacts_list = get_artifacts_list(tc.target, d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS")) | ||
79 | if not artifacts_list: | 63 | if not artifacts_list: |
80 | bb.warn("Could not load artifacts list, skip artifacts retrieval") | 64 | bb.warn("Could not load artifacts list, skip artifacts retrieval") |
81 | else: | 65 | return |
82 | retrieve_test_artifacts(tc.target, artifacts_list, get_json_result_dir(d)) | 66 | try: |
67 | # We need gnu tar for sparse files, not busybox | ||
68 | cmd = "tar --sparse -zcf - " + " ".join(artifacts_list) | ||
69 | (status, output) = tc.target.run(cmd, raw = True) | ||
70 | if status != 0 or not output: | ||
71 | raise Exception("Error while fetching compressed artifacts") | ||
72 | archive_name = os.path.join(outputdir, "tests_artifacts.tar.gz") | ||
73 | with open(archive_name, "wb") as f: | ||
74 | f.write(output) | ||
75 | except Exception as e: | ||
76 | bb.warn(f"Can not retrieve artifacts from test target: {e}") | ||
83 | 77 | ||
84 | 78 | ||
85 | ################################################################## | 79 | ################################################################## |
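A sketch of consuming the archive written above (filename as in the code; the rest assumed). GNU tar strips the leading '/' at creation time, so target-absolute artifact paths unpack as relative ones, e.g. var/log/..., under the chosen directory.

    import tarfile

    with tarfile.open("tests_artifacts.tar.gz", "r:gz") as tf:
        # archive comes from our own test target, so extracting it is fine
        tf.extractall(path="extracted-artifacts")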
@@ -87,12 +81,22 @@ def list_and_fetch_failed_tests_artifacts(d, tc): | |||
87 | ################################################################## | 81 | ################################################################## |
88 | 82 | ||
89 | def run_failed_tests_post_actions(d, tc): | 83 | def run_failed_tests_post_actions(d, tc): |
84 | artifacts = d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS") | ||
85 | # Allow all the code to be disabled by having no artifacts set, e.g. for systems with no ssh support | ||
86 | if not artifacts: | ||
87 | return | ||
88 | |||
89 | outputdir = get_artefact_dir(d) | ||
90 | os.makedirs(outputdir, exist_ok=True) | ||
91 | datestr = datetime.datetime.now().strftime('%Y%m%d') | ||
92 | outputdir = tempfile.mkdtemp(prefix='oeqa-target-artefacts-%s-' % datestr, dir=outputdir) | ||
93 | os.chmod(outputdir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) | ||
94 | |||
90 | post_actions=[ | 95 | post_actions=[ |
91 | create_artifacts_directory, | ||
92 | list_and_fetch_failed_tests_artifacts, | 96 | list_and_fetch_failed_tests_artifacts, |
93 | get_target_disk_usage, | 97 | get_target_disk_usage, |
94 | get_host_disk_usage | 98 | get_host_disk_usage |
95 | ] | 99 | ] |
96 | 100 | ||
97 | for action in post_actions: | 101 | for action in post_actions: |
98 | action(d, tc) | 102 | action(d, tc, artifacts, outputdir) |
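With the refactor above every post action shares one signature, so adding a collector is just another function appended to post_actions. A hypothetical example, not part of the upstream change:

    def get_target_dmesg(d, tc, artifacts_list, outputdir):
        # Capture the kernel ring buffer alongside the other artefacts
        try:
            (status, output) = tc.target.run('dmesg')
            with open(os.path.join(outputdir, "target_dmesg.txt"), 'w') as f:
                f.write(output)
        except Exception as e:
            bb.warn(f"Can not get target dmesg: {e}")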
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py index cda43aad8c..c4db0cf038 100644 --- a/meta/lib/oeqa/utils/qemurunner.py +++ b/meta/lib/oeqa/utils/qemurunner.py | |||
@@ -30,6 +30,8 @@ control_range = list(range(0,32))+list(range(127,160)) | |||
30 | control_chars = [chr(x) for x in control_range | 30 | control_chars = [chr(x) for x in control_range |
31 | if chr(x) not in string.printable] | 31 | if chr(x) not in string.printable] |
32 | re_control_char = re.compile('[%s]' % re.escape("".join(control_chars))) | 32 | re_control_char = re.compile('[%s]' % re.escape("".join(control_chars))) |
33 | # Regex to remove the ANSI (color) control codes from console strings in order to match the text only | ||
34 | re_vt100 = re.compile(r'(\x1b\[|\x9b)[^@-_a-z]*[@-_a-z]|\x1b[@-_a-z]') | ||
33 | 35 | ||
34 | def getOutput(o): | 36 | def getOutput(o): |
35 | import fcntl | 37 | import fcntl |
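A quick check of the re_vt100 stripper above against illustrative strings:

    re_vt100.sub("", "\x1b[1;32mqemux86-64 login:\x1b[0m")   # -> "qemux86-64 login:"
    re_vt100.sub("", "plain text")                           # unchanged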
@@ -101,7 +103,7 @@ class QemuRunner: | |||
101 | 103 | ||
102 | # Only override patterns that were set e.g. login user TESTIMAGE_BOOT_PATTERNS[send_login_user] = "webserver\n" | 104 | # Only override patterns that were set e.g. login user TESTIMAGE_BOOT_PATTERNS[send_login_user] = "webserver\n" |
103 | for pattern in accepted_patterns: | 105 | for pattern in accepted_patterns: |
104 | if not self.boot_patterns[pattern]: | 106 | if pattern not in self.boot_patterns or not self.boot_patterns[pattern]: |
105 | self.boot_patterns[pattern] = default_boot_patterns[pattern] | 107 | self.boot_patterns[pattern] = default_boot_patterns[pattern] |
106 | 108 | ||
107 | def create_socket(self): | 109 | def create_socket(self): |
@@ -265,12 +267,15 @@ class QemuRunner: | |||
265 | self.monitorpipe = os.fdopen(w, "w") | 267 | self.monitorpipe = os.fdopen(w, "w") |
266 | else: | 268 | else: |
267 | # child process | 269 | # child process |
268 | os.setpgrp() | 270 | try: |
269 | os.close(w) | 271 | os.setpgrp() |
270 | r = os.fdopen(r) | 272 | os.close(w) |
271 | x = r.read() | 273 | r = os.fdopen(r) |
272 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) | 274 | x = r.read() |
273 | os._exit(0) | 275 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) |
276 | finally: | ||
277 | # We must exit under all circumstances | ||
278 | os._exit(0) | ||
274 | 279 | ||
275 | self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid) | 280 | self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid) |
276 | self.logger.debug("waiting at most %d seconds for qemu pid (%s)" % | 281 | self.logger.debug("waiting at most %d seconds for qemu pid (%s)" % |
@@ -519,7 +524,6 @@ class QemuRunner: | |||
519 | except Exception as e: | 524 | except Exception as e: |
520 | self.logger.warning('Extra log data exception %s' % repr(e)) | 525 | self.logger.warning('Extra log data exception %s' % repr(e)) |
521 | data = None | 526 | data = None |
522 | self.thread.serial_lock.release() | ||
523 | return False | 527 | return False |
524 | 528 | ||
525 | with self.thread.serial_lock: | 529 | with self.thread.serial_lock: |
@@ -533,7 +537,7 @@ class QemuRunner: | |||
533 | self.logger.debug("Logged in as %s in serial console" % self.boot_patterns['send_login_user'].replace("\n", "")) | 537 | self.logger.debug("Logged in as %s in serial console" % self.boot_patterns['send_login_user'].replace("\n", "")) |
534 | if netconf: | 538 | if netconf: |
535 | # configure guest networking | 539 | # configure guest networking |
536 | cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask) | 540 | cmd = "ip addr add %s/%s dev eth0\nip link set dev eth0 up\n" % (self.ip, self.netmask) |
537 | output = self.run_serial(cmd, raw=True)[1] | 541 | output = self.run_serial(cmd, raw=True)[1] |
538 | if re.search(r"root@[a-zA-Z0-9\-]+:~#", output): | 542 | if re.search(r"root@[a-zA-Z0-9\-]+:~#", output): |
539 | self.logger.debug("configured ip address %s", self.ip) | 543 | self.logger.debug("configured ip address %s", self.ip) |
@@ -681,7 +685,7 @@ class QemuRunner: | |||
681 | time.sleep(0.1) | 685 | time.sleep(0.1) |
682 | answer = self.server_socket.recv(1024) | 686 | answer = self.server_socket.recv(1024) |
683 | if answer: | 687 | if answer: |
684 | data += answer.decode('utf-8') | 688 | data += re_vt100.sub("", answer.decode('utf-8')) |
685 | # Search the prompt to stop | 689 | # Search the prompt to stop |
686 | if re.search(self.boot_patterns['search_cmd_finished'], data): | 690 | if re.search(self.boot_patterns['search_cmd_finished'], data): |
687 | break | 691 | break |
@@ -745,8 +749,10 @@ class LoggingThread(threading.Thread): | |||
745 | def threadtarget(self): | 749 | def threadtarget(self): |
746 | try: | 750 | try: |
747 | self.eventloop() | 751 | self.eventloop() |
748 | except Exception as e: | 752 | except Exception: |
749 | self.logger.warning("Exception %s in logging thread" % traceback.format_exception(e)) | 753 | exc_type, exc_value, exc_traceback = sys.exc_info() |
754 | self.logger.warning("Exception %s in logging thread" % | ||
755 | traceback.format_exception(exc_type, exc_value, exc_traceback)) | ||
750 | finally: | 756 | finally: |
751 | self.teardown() | 757 | self.teardown() |
752 | 758 | ||
@@ -822,10 +828,12 @@ class LoggingThread(threading.Thread): | |||
822 | self.logfunc(data, ".stdout") | 828 | self.logfunc(data, ".stdout") |
823 | elif self.serialsock and self.serialsock.fileno() == fd: | 829 | elif self.serialsock and self.serialsock.fileno() == fd: |
824 | if self.serial_lock.acquire(blocking=False): | 830 | if self.serial_lock.acquire(blocking=False): |
825 | data = self.recv(1024, self.serialsock) | 831 | try: |
826 | self.logger.debug("Data received serial thread %s" % data.decode('utf-8', 'replace')) | 832 | data = self.recv(1024, self.serialsock) |
827 | self.logfunc(data, ".2") | 833 | self.logger.debug("Data received serial thread %s" % data.decode('utf-8', 'replace')) |
828 | self.serial_lock.release() | 834 | self.logfunc(data, ".2") |
835 | finally: | ||
836 | self.serial_lock.release() | ||
829 | else: | 837 | else: |
830 | serial_registered = False | 838 | serial_registered = False |
831 | poll.unregister(self.serialsock.fileno()) | 839 | poll.unregister(self.serialsock.fileno()) |
diff --git a/meta/lib/oeqa/utils/sshcontrol.py b/meta/lib/oeqa/utils/sshcontrol.py index 36c2ecb3db..88a61aff63 100644 --- a/meta/lib/oeqa/utils/sshcontrol.py +++ b/meta/lib/oeqa/utils/sshcontrol.py | |||
@@ -57,8 +57,10 @@ class SSHProcess(object): | |||
57 | if select.select([self.process.stdout], [], [], 5)[0] != []: | 57 | if select.select([self.process.stdout], [], [], 5)[0] != []: |
58 | data = os.read(self.process.stdout.fileno(), 1024) | 58 | data = os.read(self.process.stdout.fileno(), 1024) |
59 | if not data: | 59 | if not data: |
60 | self.process.stdout.close() | 60 | self.process.poll() |
61 | eof = True | 61 | if self.process.returncode is not None: |
62 | self.process.stdout.close() | ||
63 | eof = True | ||
62 | else: | 64 | else: |
63 | data = data.decode("utf-8") | 65 | data = data.decode("utf-8") |
64 | output += data | 66 | output += data |
diff --git a/meta/lib/oeqa/utils/subprocesstweak.py b/meta/lib/oeqa/utils/subprocesstweak.py index 3e43ed547b..1774513023 100644 --- a/meta/lib/oeqa/utils/subprocesstweak.py +++ b/meta/lib/oeqa/utils/subprocesstweak.py | |||
@@ -8,16 +8,11 @@ import subprocess | |||
8 | class OETestCalledProcessError(subprocess.CalledProcessError): | 8 | class OETestCalledProcessError(subprocess.CalledProcessError): |
9 | def __str__(self): | 9 | def __str__(self): |
10 | def strify(o): | 10 | def strify(o): |
11 | if isinstance(o, bytes): | 11 | return o.decode("utf-8", errors="replace") if isinstance(o, bytes) else o |
12 | return o.decode("utf-8", errors="replace") | ||
13 | else: | ||
14 | return o | ||
15 | 12 | ||
16 | s = "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) | 13 | s = super().__str__() |
17 | if hasattr(self, "output") and self.output: | 14 | s = s + "\nStandard Output: " + strify(self.output) |
18 | s = s + "\nStandard Output: " + strify(self.output) | 15 | s = s + "\nStandard Error: " + strify(self.stderr) |
19 | if hasattr(self, "stderr") and self.stderr: | ||
20 | s = s + "\nStandard Error: " + strify(self.stderr) | ||
21 | return s | 16 | return s |
22 | 17 | ||
23 | def errors_have_output(): | 18 | def errors_have_output(): |
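A hedged sketch of the effect: errors_have_output() (defined in this module) installs OETestCalledProcessError as subprocess.CalledProcessError, so a failing command's exception message also carries its captured output.

    import subprocess
    from oeqa.utils.subprocesstweak import errors_have_output

    errors_have_output()
    try:
        subprocess.run(["sh", "-c", "echo oops >&2; exit 3"],
                       check=True, capture_output=True)
    except subprocess.CalledProcessError as e:
        print(e)   # now ends with "Standard Output: ..." and "Standard Error: oops"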
diff --git a/meta/lib/oeqa/utils/testexport.py b/meta/lib/oeqa/utils/testexport.py index e89d130a9c..3ab024d9e9 100644 --- a/meta/lib/oeqa/utils/testexport.py +++ b/meta/lib/oeqa/utils/testexport.py | |||
@@ -60,17 +60,17 @@ def process_binaries(d, params): | |||
60 | export_env = d.getVar("TEST_EXPORT_ONLY") | 60 | export_env = d.getVar("TEST_EXPORT_ONLY") |
61 | 61 | ||
62 | def extract_binary(pth_to_pkg, dest_pth=None): | 62 | def extract_binary(pth_to_pkg, dest_pth=None): |
63 | cpio_command = runCmd("which cpio") | 63 | tar_command = runCmd("which tar") |
64 | rpm2cpio_command = runCmd("ls /usr/bin/rpm2cpio") | 64 | rpm2archive_command = runCmd("ls /usr/bin/rpm2archive") |
65 | if (cpio_command.status != 0) and (rpm2cpio_command.status != 0): | 65 | if (tar_command.status != 0) and (rpm2archive_command.status != 0): |
66 | bb.fatal("Either \"rpm2cpio\" or \"cpio\" tools are not available on your system." | 66 | bb.fatal("Either \"rpm2archive\" or \"tar\" tools are not available on your system." |
67 | "All binaries extraction processes will not be available, crashing all related tests." | 67 | "All binaries extraction processes will not be available, crashing all related tests." |
68 | "Please install them according to your OS recommendations") # will exit here | 68 | "Please install them according to your OS recommendations") # will exit here |
69 | if dest_pth: | 69 | if dest_pth: |
70 | os.chdir(dest_pth) | 70 | os.chdir(dest_pth) |
71 | else: | 71 | else: |
72 | os.chdir("%s" % os.sep)# this is for native package | 72 | os.chdir("%s" % os.sep)# this is for native package |
73 | extract_bin_command = runCmd("%s %s | %s -idm" % (rpm2cpio_command.output, pth_to_pkg, cpio_command.output)) # semi-hardcoded because of a bug on poky's rpm2cpio | 73 | extract_bin_command = runCmd("%s -n %s | %s xv" % (rpm2archive_command.output, pth_to_pkg, tar_command.output)) # semi-hardcoded because of a bug on poky's rpm2cpio |
74 | return extract_bin_command | 74 | return extract_bin_command |
75 | 75 | ||
76 | if determine_if_poky_env(): # machine with poky environment | 76 | if determine_if_poky_env(): # machine with poky environment |
diff --git a/meta/lib/patchtest/README.md b/meta/lib/patchtest/README.md index f66613c0c1..27cc61c802 100644 --- a/meta/lib/patchtest/README.md +++ b/meta/lib/patchtest/README.md | |||
@@ -1,12 +1,12 @@ | |||
1 | # patchtest selftests for openembedded-core | 1 | # patchtest selftests for openembedded-core |
2 | 2 | ||
3 | This directory provides a test suite and selftest script for use with the | 3 | This directory provides a test suite and selftest script for use with the |
4 | patchtest repository: https://git.yoctoproject.org/patchtest/ | 4 | patchtest repository: <https://git.yoctoproject.org/patchtest/> |
5 | 5 | ||
6 | To setup for use: | 6 | To setup for use: |
7 | 7 | ||
8 | 1. Clone https://git.openembedded.org/openembedded-core (this repo) and https://git.openembedded.org/bitbake/ | 8 | 1. Clone <https://git.openembedded.org/openembedded-core> (this repo) and <https://git.openembedded.org/bitbake/> |
9 | 2. Clone https://git.yoctoproject.org/patchtest | 9 | 2. Clone <https://git.yoctoproject.org/patchtest> |
10 | 3. Install the necessary Python modules: in meta/lib/patchtest or the patchtest | 10 | 3. Install the necessary Python modules: in meta/lib/patchtest or the patchtest |
11 | repo, do `pip install -r requirements.txt` | 11 | repo, do `pip install -r requirements.txt` |
12 | 4. Add patchtest to PATH: `export PATH=/path/to/patchtest/repo:$PATH` | 12 | 4. Add patchtest to PATH: `export PATH=/path/to/patchtest/repo:$PATH` |
@@ -17,4 +17,4 @@ To setup for use: | |||
17 | 7. Finally, run the selftest script: `./meta/lib/patchtest/selftest/selftest` | 17 | 7. Finally, run the selftest script: `./meta/lib/patchtest/selftest/selftest` |
18 | 18 | ||
19 | For more information on using patchtest, see the patchtest repo at | 19 | For more information on using patchtest, see the patchtest repo at |
20 | https://git.yoctoproject.org/patchtest/. | 20 | <https://git.yoctoproject.org/patchtest/>. |
diff --git a/meta/lib/patchtest/mbox.py b/meta/lib/patchtest/mbox.py new file mode 100644 index 0000000000..1d95819b7a --- /dev/null +++ b/meta/lib/patchtest/mbox.py | |||
@@ -0,0 +1,108 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | |||
3 | # mbox.py | ||
4 | # | ||
5 | # Read a series' mbox file and get information about the patches | ||
6 | # contained | ||
7 | # | ||
8 | # Copyright (C) 2024 BayLibre SAS | ||
9 | # | ||
10 | # SPDX-License-Identifier: GPL-2.0-only | ||
11 | # | ||
12 | |||
13 | import email | ||
14 | import re | ||
15 | |||
16 | # From: https://stackoverflow.com/questions/59681461/read-a-big-mbox-file-with-python | ||
17 | class MboxReader: | ||
18 | def __init__(self, filepath): | ||
19 | self.handle = open(filepath, 'rb') | ||
20 | assert self.handle.readline().startswith(b'From ') | ||
21 | |||
22 | def __enter__(self): | ||
23 | return self | ||
24 | |||
25 | def __exit__(self, exc_type, exc_value, exc_traceback): | ||
26 | self.handle.close() | ||
27 | |||
28 | def __iter__(self): | ||
29 | return iter(self.__next__()) | ||
30 | |||
31 | def __next__(self): | ||
32 | lines = [] | ||
33 | while True: | ||
34 | line = self.handle.readline() | ||
35 | if line == b'' or line.startswith(b'From '): | ||
36 | yield email.message_from_bytes(b''.join(lines)) | ||
37 | if line == b'': | ||
38 | break | ||
39 | lines = [] | ||
40 | continue | ||
41 | lines.append(line) | ||
42 | |||
43 | class Patch: | ||
44 | def __init__(self, data): | ||
45 | self.author = data['From'] | ||
46 | self.to = data['To'] | ||
47 | self.cc = data['Cc'] | ||
48 | self.subject = data['Subject'] | ||
49 | self.split_body = re.split('---', data.get_payload(), maxsplit=1) | ||
50 | self.commit_message = self.split_body[0] | ||
51 | self.diff = self.split_body[1] | ||
52 | |||
53 | class PatchSeries: | ||
54 | def __init__(self, filepath): | ||
55 | with MboxReader(filepath) as mbox: | ||
56 | self.patches = [Patch(message) for message in mbox] | ||
57 | |||
58 | assert self.patches | ||
59 | self.patch_count = len(self.patches) | ||
60 | self.path = filepath | ||
61 | |||
62 | @property | ||
63 | def path(self): | ||
64 | return self.path | ||
65 | |||
66 | self.branch = self.get_branch() | ||
67 | |||
68 | def get_branch(self): | ||
69 | fullprefix = "" | ||
70 | pattern = re.compile(r"(\[.*\])", re.DOTALL) | ||
71 | |||
72 | # There should be at least one patch in the series and it should | ||
73 | # include the branch name in the subject, so parse that | ||
74 | match = pattern.search(self.patches[0].subject) | ||
75 | if match: | ||
76 | fullprefix = match.group(1) | ||
77 | |||
78 | branch, branches, valid_branches = None, [], [] | ||
79 | |||
80 | if fullprefix: | ||
81 | prefix = fullprefix.strip('[]') | ||
82 | branches = [ b.strip() for b in prefix.split(',')] | ||
83 | valid_branches = [b for b in branches if PatchSeries.valid_branch(b)] | ||
84 | |||
85 | if len(valid_branches): | ||
86 | branch = valid_branches[0] | ||
87 | |||
88 | # Get the branch name excluding any brackets. If nothing was | ||
89 | # found, then assume there was no branch tag in the subject line | ||
90 | # and that the patch targets master | ||
91 | if branch is not None: | ||
92 | return branch.split(']')[0] | ||
93 | else: | ||
94 | return "master" | ||
95 | |||
96 | @staticmethod | ||
97 | def valid_branch(branch): | ||
98 | """ Check if branch is valid name """ | ||
99 | lbranch = branch.lower() | ||
100 | |||
101 | invalid = lbranch.startswith('patch') or \ | ||
102 | lbranch.startswith('rfc') or \ | ||
103 | lbranch.startswith('resend') or \ | ||
104 | re.search(r'^v\d+', lbranch) or \ | ||
105 | re.search(r'^\d+/\d+', lbranch) | ||
106 | |||
107 | return not invalid | ||
108 | |||
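A hedged usage sketch for the new module (mbox path hypothetical), plus the classifications the valid_branch() rules above imply:

    series = PatchSeries("series.mbox")
    series.patch_count                       # number of messages in the mbox
    series.branch                            # e.g. "master" if no branch tag is found

    PatchSeries.valid_branch("kirkstone")    # True  - plausible branch name
    PatchSeries.valid_branch("PATCH v2")     # False - starts with "patch"
    PatchSeries.valid_branch("v3")           # False - version prefix
    PatchSeries.valid_branch("2/5")          # False - series numbering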
diff --git a/meta/lib/patchtest/patch.py b/meta/lib/patchtest/patch.py deleted file mode 100644 index baf6283873..0000000000 --- a/meta/lib/patchtest/patch.py +++ /dev/null | |||
@@ -1,62 +0,0 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | # | ||
4 | # patchtestpatch: PatchTestPatch class which abstracts a patch file | ||
5 | # | ||
6 | # Copyright (C) 2016 Intel Corporation | ||
7 | # | ||
8 | # SPDX-License-Identifier: GPL-2.0-only | ||
9 | # | ||
10 | |||
11 | import logging | ||
12 | import utils | ||
13 | |||
14 | logger = logging.getLogger('patchtest') | ||
15 | |||
16 | class PatchTestPatch(object): | ||
17 | MERGE_STATUS_INVALID = 'INVALID' | ||
18 | MERGE_STATUS_NOT_MERGED = 'NOTMERGED' | ||
19 | MERGE_STATUS_MERGED_SUCCESSFULL = 'PASS' | ||
20 | MERGE_STATUS_MERGED_FAIL = 'FAIL' | ||
21 | MERGE_STATUS = (MERGE_STATUS_INVALID, | ||
22 | MERGE_STATUS_NOT_MERGED, | ||
23 | MERGE_STATUS_MERGED_SUCCESSFULL, | ||
24 | MERGE_STATUS_MERGED_FAIL) | ||
25 | |||
26 | def __init__(self, path, forcereload=False): | ||
27 | self._path = path | ||
28 | self._forcereload = forcereload | ||
29 | |||
30 | self._contents = None | ||
31 | self._branch = None | ||
32 | self._merge_status = PatchTestPatch.MERGE_STATUS_NOT_MERGED | ||
33 | |||
34 | @property | ||
35 | def contents(self): | ||
36 | if self._forcereload or (not self._contents): | ||
37 | logger.debug('Reading %s contents' % self._path) | ||
38 | try: | ||
39 | with open(self._path, newline='') as _f: | ||
40 | self._contents = _f.read() | ||
41 | except IOError: | ||
42 | logger.warn("Reading the mbox %s failed" % self.resource) | ||
43 | return self._contents | ||
44 | |||
45 | @property | ||
46 | def path(self): | ||
47 | return self._path | ||
48 | |||
49 | @property | ||
50 | def branch(self): | ||
51 | if not self._branch: | ||
52 | self._branch = utils.get_branch(self._path) | ||
53 | return self._branch | ||
54 | |||
55 | def setmergestatus(self, status): | ||
56 | self._merge_status = status | ||
57 | |||
58 | def getmergestatus(self): | ||
59 | return self._merge_status | ||
60 | |||
61 | merge_status = property(getmergestatus, setmergestatus) | ||
62 | |||
diff --git a/meta/lib/patchtest/data.py b/meta/lib/patchtest/patchtest_parser.py index 356259921d..2a11cb76c2 100644 --- a/meta/lib/patchtest/data.py +++ b/meta/lib/patchtest/patchtest_parser.py | |||
@@ -15,19 +15,11 @@ | |||
15 | 15 | ||
16 | import os | 16 | import os |
17 | import argparse | 17 | import argparse |
18 | import collections | ||
19 | import logging | ||
20 | |||
21 | logger=logging.getLogger('patchtest') | ||
22 | info=logger.info | ||
23 | 18 | ||
24 | default_testdir = os.path.abspath(os.path.dirname(__file__) + "/tests") | 19 | default_testdir = os.path.abspath(os.path.dirname(__file__) + "/tests") |
25 | default_repodir = os.path.abspath(os.path.dirname(__file__) + "/../../..") | 20 | default_repodir = os.path.abspath(os.path.dirname(__file__) + "/../../..") |
26 | 21 | ||
27 | # Data store commonly used to share values between pre and post-merge tests | 22 | class PatchtestParser(object): |
28 | PatchTestDataStore = collections.defaultdict(str) | ||
29 | |||
30 | class PatchTestInput(object): | ||
31 | """Abstract the patchtest argument parser""" | 23 | """Abstract the patchtest argument parser""" |
32 | 24 | ||
33 | @classmethod | 25 | @classmethod |
diff --git a/meta/lib/patchtest/patchtest_patterns.py b/meta/lib/patchtest/patchtest_patterns.py new file mode 100644 index 0000000000..50637cf499 --- /dev/null +++ b/meta/lib/patchtest/patchtest_patterns.py | |||
@@ -0,0 +1,98 @@ | |||
1 | # common pyparsing variables | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import pyparsing | ||
8 | import re | ||
9 | |||
10 | # general | ||
11 | colon = pyparsing.Literal(":") | ||
12 | line_start = pyparsing.LineStart() | ||
13 | line_end = pyparsing.LineEnd() | ||
14 | lessthan = pyparsing.Literal("<") | ||
15 | greaterthan = pyparsing.Literal(">") | ||
16 | inappropriate = pyparsing.CaselessLiteral("Inappropriate") | ||
17 | submitted = pyparsing.CaselessLiteral("Submitted") | ||
18 | |||
19 | # word related | ||
20 | nestexpr = pyparsing.nestedExpr(opener='[', closer=']') | ||
21 | inappropriateinfo = pyparsing.Literal("Inappropriate") + nestexpr | ||
22 | submittedinfo = pyparsing.Literal("Submitted") + nestexpr | ||
23 | word = pyparsing.Word(pyparsing.alphas) | ||
24 | worddot = pyparsing.Word(pyparsing.alphas+".") | ||
25 | |||
26 | # metadata | ||
27 | |||
28 | metadata_lic = 'LICENSE' | ||
29 | invalid_license = 'PATCHTESTINVALID' | ||
30 | metadata_chksum = 'LIC_FILES_CHKSUM' | ||
31 | license_var = 'LICENSE' | ||
32 | closed = 'CLOSED' | ||
33 | lictag_re = pyparsing.AtLineStart("License-Update:") | ||
34 | lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum) | ||
35 | lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum) | ||
36 | add_mark = pyparsing.Regex('\\+ ') | ||
37 | patch_max_line_length = 200 | ||
38 | metadata_src_uri = "SRC_URI" | ||
39 | metadata_summary = "SUMMARY" | ||
40 | cve_check_ignore_var = "CVE_CHECK_IGNORE" | ||
41 | cve_status_var = "CVE_STATUS" | ||
42 | endcommit_messages_regex = re.compile( | ||
43 | r"\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n" | ||
44 | ) | ||
45 | patchmetadata_regex = re.compile( | ||
46 | r"-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+" | ||
47 | ) | ||
48 | |||
49 | # mbox | ||
50 | auh_email = 'auh@yoctoproject.org' | ||
51 | |||
52 | invalid_submitters = [pyparsing.Regex("^Upgrade Helper.+"), | ||
53 | pyparsing.Regex(auh_email), | ||
54 | pyparsing.Regex("uh@not\.set"), | ||
55 | pyparsing.Regex("\S+@example\.com")] | ||
56 | |||
57 | mbox_bugzilla = pyparsing.Regex('\[\s?YOCTO.*\]') | ||
58 | mbox_bugzilla_validation = pyparsing.Regex('\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]') | ||
59 | mbox_revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"') | ||
60 | mbox_shortlog_maxlength = 90 | ||
61 | # based on https://stackoverflow.com/questions/30281026/regex-parsing-github-usernames-javascript | ||
62 | mbox_github_username = pyparsing.Regex('\B@([a-z0-9](?:-(?=[a-z0-9])|[a-z0-9]){0,38}(?<=[a-z0-9]))') | ||
63 | |||
64 | # patch | ||
65 | |||
66 | cve = pyparsing.Regex("CVE\-\d{4}\-\d+") | ||
67 | cve_payload_tag = pyparsing.Regex("\+CVE:(\s+CVE\-\d{4}\-\d+)+") | ||
68 | upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status") | ||
69 | |||
70 | # shortlog | ||
71 | |||
72 | shortlog_target = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables.replace(':',''))) | ||
73 | shortlog_summary = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables)) | ||
74 | shortlog = line_start + shortlog_target + colon + shortlog_summary + line_end | ||
75 | |||
76 | # signed-off-bys | ||
77 | |||
78 | email_pattern = pyparsing.Regex(r"(?P<user>[A-Za-z0-9._%+-]+)@(?P<hostname>[A-Za-z0-9.-]+)\.(?P<domain>[A-Za-z]{2,})") | ||
79 | |||
80 | signed_off_by_prefix = pyparsing.Literal("Signed-off-by:") | ||
81 | signed_off_by_name = pyparsing.Regex('\S+.*(?= <)') | ||
82 | signed_off_by_email = lessthan + email_pattern + greaterthan | ||
83 | signed_off_by = pyparsing.AtLineStart(signed_off_by_prefix + signed_off_by_name + signed_off_by_email) | ||
84 | patch_signed_off_by = pyparsing.AtLineStart("+" + signed_off_by_prefix + signed_off_by_name + signed_off_by_email) | ||
85 | |||
86 | # upstream-status | ||
87 | |||
88 | upstream_status_literal_valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted", "Inactive-Upstream"] | ||
89 | upstream_status_nonliteral_valid_status = ["Pending", "Backport", "Denied", "Inappropriate [reason]", "Submitted [where]", "Inactive-Upstream [lastcommit: when (and/or) lastrelease: when]"] | ||
90 | |||
91 | upstream_status_valid_status = pyparsing.Or( | ||
92 | [pyparsing.Literal(status) for status in upstream_status_literal_valid_status] | ||
93 | ) | ||
94 | |||
95 | upstream_status_prefix = pyparsing.Literal("Upstream-Status") | ||
96 | upstream_status = line_start + upstream_status_prefix + colon + upstream_status_valid_status | ||
97 | upstream_status_inappropriate_info = line_start + upstream_status_prefix + colon + inappropriateinfo | ||
98 | upstream_status_submitted_info = line_start + upstream_status_prefix + colon + submittedinfo | ||
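A quick sanity check (illustrative) of the Upstream-Status grammar above; pyparsing raises ParseException for a tag outside the accepted set:

    upstream_status.parseString("Upstream-Status: Backport")   # parses
    upstream_status.parseString("Upstream-Status: Maybe")      # raises pyparsing.ParseException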
diff --git a/meta/lib/patchtest/repo.py b/meta/lib/patchtest/repo.py index 5f361ac500..8ec8f68a0b 100644 --- a/meta/lib/patchtest/repo.py +++ b/meta/lib/patchtest/repo.py | |||
@@ -8,40 +8,27 @@ | |||
8 | # SPDX-License-Identifier: GPL-2.0-only | 8 | # SPDX-License-Identifier: GPL-2.0-only |
9 | # | 9 | # |
10 | 10 | ||
11 | import os | ||
12 | import utils | ||
13 | import logging | ||
14 | import git | 11 | import git |
15 | from patch import PatchTestPatch | 12 | import os |
16 | 13 | import mbox | |
17 | logger = logging.getLogger('patchtest') | ||
18 | info=logger.info | ||
19 | 14 | ||
20 | class PatchTestRepo(object): | 15 | class PatchTestRepo(object): |
21 | 16 | ||
22 | # prefixes used for temporal branches/stashes | 17 | # prefixes used for temporal branches/stashes |
23 | prefix = 'patchtest' | 18 | prefix = 'patchtest' |
24 | 19 | ||
25 | |||
26 | def __init__(self, patch, repodir, commit=None, branch=None): | 20 | def __init__(self, patch, repodir, commit=None, branch=None): |
27 | self._repodir = repodir | 21 | self.repodir = repodir |
28 | self._repo = git.Repo.init(repodir) | 22 | self.repo = git.Repo.init(repodir) |
29 | self._patch = PatchTestPatch(patch) | 23 | self.patch = mbox.PatchSeries(patch) |
30 | self._current_branch = self._repo.active_branch.name | 24 | self.current_branch = self.repo.active_branch.name |
31 | 25 | ||
32 | # targeted branch defined on the patch may be invalid, so make sure there | 26 | # targeted branch defined on the patch may be invalid, so make sure there |
33 | # is a corresponding remote branch | 27 | # is a corresponding remote branch |
34 | valid_patch_branch = None | 28 | valid_patch_branch = None |
35 | if self._patch.branch in self._repo.branches: | 29 | if self.patch.branch in self.repo.branches: |
36 | valid_patch_branch = self._patch.branch | 30 | valid_patch_branch = self.patch.branch |
37 | 31 | ||
38 | # Target Branch | ||
39 | # Priority (top has highest priority): | ||
40 | # 1. branch given at cmd line | ||
41 | # 2. branch given at the patch | ||
42 | # 3. current branch | ||
43 | self._branch = branch or valid_patch_branch or self._current_branch | ||
44 | |||
45 | # Target Commit | 32 | # Target Commit |
46 | # Priority (top has highest priority): | 33 | # Priority (top has highest priority): |
47 | # 1. commit given at cmd line | 34 | # 1. commit given at cmd line |
@@ -57,7 +44,7 @@ class PatchTestRepo(object): | |||
57 | 44 | ||
58 | # create working branch. Use the '-B' flag so that we just | 45 | # create working branch. Use the '-B' flag so that we just |
59 | # check out the existing one if it's there | 46 | # check out the existing one if it's there |
60 | self._repo.git.execute(['git', 'checkout', '-B', self._workingbranch, self._commit]) | 47 | self.repo.git.execute(['git', 'checkout', '-B', self._workingbranch, self._commit]) |
61 | 48 | ||
62 | self._patchmerged = False | 49 | self._patchmerged = False |
63 | 50 | ||
@@ -65,35 +52,13 @@ class PatchTestRepo(object): | |||
65 | self._patchcanbemerged = True | 52 | self._patchcanbemerged = True |
66 | try: | 53 | try: |
67 | # Make sure to get the absolute path of the file | 54 | # Make sure to get the absolute path of the file |
68 | self._repo.git.execute(['git', 'apply', '--check', os.path.abspath(self._patch.path)], with_exceptions=True) | 55 | self.repo.git.execute(['git', 'apply', '--check', os.path.abspath(self.patch.path)], with_exceptions=True) |
69 | except git.exc.GitCommandError as ce: | 56 | except git.exc.GitCommandError as ce: |
70 | self._patchcanbemerged = False | 57 | self._patchcanbemerged = False |
71 | 58 | ||
72 | # for debugging purposes, print all repo parameters | ||
73 | logger.debug("Parameters") | ||
74 | logger.debug("\tRepository : %s" % self._repodir) | ||
75 | logger.debug("\tTarget Commit : %s" % self._commit) | ||
76 | logger.debug("\tTarget Branch : %s" % self._branch) | ||
77 | logger.debug("\tWorking branch : %s" % self._workingbranch) | ||
78 | logger.debug("\tPatch : %s" % self._patch) | ||
79 | |||
80 | @property | ||
81 | def patch(self): | ||
82 | return self._patch.path | ||
83 | |||
84 | @property | ||
85 | def branch(self): | ||
86 | return self._branch | ||
87 | |||
88 | @property | ||
89 | def commit(self): | ||
90 | return self._commit | ||
91 | |||
92 | @property | ||
93 | def ismerged(self): | 59 | def ismerged(self): |
94 | return self._patchmerged | 60 | return self._patchmerged |
95 | 61 | ||
96 | @property | ||
97 | def canbemerged(self): | 62 | def canbemerged(self): |
98 | return self._patchcanbemerged | 63 | return self._patchcanbemerged |
99 | 64 | ||
@@ -103,7 +68,7 @@ class PatchTestRepo(object): | |||
103 | return None | 68 | return None |
104 | 69 | ||
105 | try: | 70 | try: |
106 | return self._repo.rev_parse(commit).hexsha | 71 | return self.repo.rev_parse(commit).hexsha |
107 | except Exception as e: | 72 | except Exception as e: |
108 | print(f"Couldn't find commit {commit} in repo") | 73 | print(f"Couldn't find commit {commit} in repo") |
109 | 74 | ||
@@ -111,10 +76,10 @@ class PatchTestRepo(object): | |||
111 | 76 | ||
112 | def merge(self): | 77 | def merge(self): |
113 | if self._patchcanbemerged: | 78 | if self._patchcanbemerged: |
114 | self._repo.git.execute(['git', 'am', '--keep-cr', os.path.abspath(self._patch.path)]) | 79 | self.repo.git.execute(['git', 'am', '--keep-cr', os.path.abspath(self.patch.path)]) |
115 | self._patchmerged = True | 80 | self._patchmerged = True |
116 | 81 | ||
117 | def clean(self): | 82 | def clean(self): |
118 | self._repo.git.execute(['git', 'checkout', self._current_branch]) | 83 | self.repo.git.execute(['git', 'checkout', self.current_branch]) |
119 | self._repo.git.execute(['git', 'branch', '-D', self._workingbranch]) | 84 | self.repo.git.execute(['git', 'branch', '-D', self._workingbranch]) |
120 | self._patchmerged = False | 85 | self._patchmerged = False |
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail index 0c40cdc1b6..30c1bc4624 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail | |||
@@ -1,32 +1,43 @@ | |||
1 | From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 | 1 | From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001 |
2 | From: First Last <first.last@example.com> | 2 | From: First Last <first.last@example.com> |
3 | Date: Tue, 29 Aug 2023 13:32:24 -0400 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] selftest-hello: add a summary | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This patch should fail the selftests because the author address is from the | 6 | This should fail the test_author_valid test. |
7 | invalid "example.com". | ||
8 | 7 | ||
9 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
10 | --- | 9 | --- |
11 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- | 10 | .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++ |
12 | 1 file changed, 2 insertions(+), 1 deletion(-) | 11 | 1 file changed, 21 insertions(+) |
12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb | ||
13 | 13 | ||
14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
15 | index 547587bef4..491f0a3df7 100644 | 15 | new file mode 100644 |
16 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | index 00000000000..f3dec1b220c |
17 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 17 | --- /dev/null |
18 | @@ -1,3 +1,4 @@ | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
19 | +SUMMARY = "A cool sample" | 19 | @@ -0,0 +1,21 @@ |
20 | DESCRIPTION = "Simple helloworld application -- selftest variant" | 20 | +SUMMARY = "This is an example summary" |
21 | SECTION = "examples" | 21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
22 | LICENSE = "MIT" | 22 | +SECTION = "examples" |
23 | @@ -16,4 +17,4 @@ do_install() { | 23 | +LICENSE = "MIT" |
24 | install -m 0755 helloworld ${D}${bindir} | 24 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
25 | } | 25 | + |
26 | 26 | +SRC_URI = "file://helloworld.c" | |
27 | -BBCLASSEXTEND = "native nativesdk" | 27 | + |
28 | \ No newline at end of file | 28 | +S = "${WORKDIR}/sources" |
29 | +UNPACKDIR = "${S}" | ||
30 | + | ||
31 | +do_compile() { | ||
32 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld | ||
33 | +} | ||
34 | + | ||
35 | +do_install() { | ||
36 | + install -d ${D}${bindir} | ||
37 | + install -m 0755 helloworld ${D}${bindir} | ||
38 | +} | ||
39 | + | ||
29 | +BBCLASSEXTEND = "native nativesdk" | 40 | +BBCLASSEXTEND = "native nativesdk" |
30 | -- | 41 | -- |
31 | 2.41.0 | 42 | 2.45.1 |
32 | 43 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass index cbb8ef2cef..6e82b08bc6 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass | |||
@@ -1,31 +1,43 @@ | |||
1 | From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 | 1 | From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001 |
2 | From: First Last <first.last@address.com> | 2 | From: First Last <first.last@address.com> |
3 | Date: Tue, 29 Aug 2023 13:32:24 -0400 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] selftest-hello: add a summary | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This patch should pass the selftests because the author address is in a valid format. | 6 | This should pass the test_author_valid test. |
7 | 7 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- | 10 | .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++ |
11 | 1 file changed, 2 insertions(+), 1 deletion(-) | 11 | 1 file changed, 21 insertions(+) |
12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb | ||
12 | 13 | ||
13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
14 | index 547587bef4..491f0a3df7 100644 | 15 | new file mode 100644 |
15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | index 00000000000..f3dec1b220c |
16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 17 | --- /dev/null |
17 | @@ -1,3 +1,4 @@ | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
18 | +SUMMARY = "A cool sample" | 19 | @@ -0,0 +1,21 @@ |
19 | DESCRIPTION = "Simple helloworld application -- selftest variant" | 20 | +SUMMARY = "This is an example summary" |
20 | SECTION = "examples" | 21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
21 | LICENSE = "MIT" | 22 | +SECTION = "examples" |
22 | @@ -16,4 +17,4 @@ do_install() { | 23 | +LICENSE = "MIT" |
23 | install -m 0755 helloworld ${D}${bindir} | 24 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
24 | } | 25 | + |
25 | 26 | +SRC_URI = "file://helloworld.c" | |
26 | -BBCLASSEXTEND = "native nativesdk" | 27 | + |
27 | \ No newline at end of file | 28 | +S = "${WORKDIR}/sources" |
29 | +UNPACKDIR = "${S}" | ||
30 | + | ||
31 | +do_compile() { | ||
32 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld | ||
33 | +} | ||
34 | + | ||
35 | +do_install() { | ||
36 | + install -d ${D}${bindir} | ||
37 | + install -m 0755 helloworld ${D}${bindir} | ||
38 | +} | ||
39 | + | ||
28 | +BBCLASSEXTEND = "native nativesdk" | 40 | +BBCLASSEXTEND = "native nativesdk" |
29 | -- | 41 | -- |
30 | 2.41.0 | 42 | 2.45.1 |
31 | 43 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail index 3e2b81bca1..745a8f45d9 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail | |||
@@ -1,31 +1,43 @@ | |||
1 | From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 | 1 | From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001 |
2 | From: Upgrade Helper <auh@auh.yoctoproject.org> | 2 | From: Upgrade Helper <auh@auh.yoctoproject.org> |
3 | Date: Tue, 29 Aug 2023 13:32:24 -0400 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] selftest-hello: add a summary | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This patch should fail the selftests because AUH is an invalid sender. | 6 | This should fail the test_author_valid test. |
7 | 7 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- | 10 | .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++ |
11 | 1 file changed, 2 insertions(+), 1 deletion(-) | 11 | 1 file changed, 21 insertions(+) |
12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb | ||
12 | 13 | ||
13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
14 | index 547587bef4..491f0a3df7 100644 | 15 | new file mode 100644 |
15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | index 00000000000..f3dec1b220c |
16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 17 | --- /dev/null |
17 | @@ -1,3 +1,4 @@ | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
18 | +SUMMARY = "A cool sample" | 19 | @@ -0,0 +1,21 @@ |
19 | DESCRIPTION = "Simple helloworld application -- selftest variant" | 20 | +SUMMARY = "This is an example summary" |
20 | SECTION = "examples" | 21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
21 | LICENSE = "MIT" | 22 | +SECTION = "examples" |
22 | @@ -16,4 +17,4 @@ do_install() { | 23 | +LICENSE = "MIT" |
23 | install -m 0755 helloworld ${D}${bindir} | 24 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
24 | } | 25 | + |
25 | 26 | +SRC_URI = "file://helloworld.c" | |
26 | -BBCLASSEXTEND = "native nativesdk" | 27 | + |
27 | \ No newline at end of file | 28 | +S = "${WORKDIR}/sources" |
29 | +UNPACKDIR = "${S}" | ||
30 | + | ||
31 | +do_compile() { | ||
32 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld | ||
33 | +} | ||
34 | + | ||
35 | +do_install() { | ||
36 | + install -d ${D}${bindir} | ||
37 | + install -m 0755 helloworld ${D}${bindir} | ||
38 | +} | ||
39 | + | ||
28 | +BBCLASSEXTEND = "native nativesdk" | 40 | +BBCLASSEXTEND = "native nativesdk" |
29 | -- | 41 | -- |
30 | 2.41.0 | 42 | 2.45.1 |
31 | 43 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass index f84e1265a7..56cb77fa69 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass | |||
@@ -1,31 +1,43 @@ | |||
1 | From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 | 1 | From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001 |
2 | From: First Last <averylongemailaddressthatishardtoread.from@address.com> | 2 | From: First Last <averylongemailaddressthatishardtoread.from@address.com> |
3 | Date: Tue, 29 Aug 2023 13:32:24 -0400 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] selftest-hello: add a summary | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This patch should pass the selftests because the author address is in a valid format. | 6 | This should pass the test_author_valid test. |
7 | 7 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- | 10 | .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++ |
11 | 1 file changed, 2 insertions(+), 1 deletion(-) | 11 | 1 file changed, 21 insertions(+) |
12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb | ||
12 | 13 | ||
13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
14 | index 547587bef4..491f0a3df7 100644 | 15 | new file mode 100644 |
15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | index 00000000000..f3dec1b220c |
16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 17 | --- /dev/null |
17 | @@ -1,3 +1,4 @@ | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
18 | +SUMMARY = "A cool sample" | 19 | @@ -0,0 +1,21 @@ |
19 | DESCRIPTION = "Simple helloworld application -- selftest variant" | 20 | +SUMMARY = "This is an example summary" |
20 | SECTION = "examples" | 21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
21 | LICENSE = "MIT" | 22 | +SECTION = "examples" |
22 | @@ -16,4 +17,4 @@ do_install() { | 23 | +LICENSE = "MIT" |
23 | install -m 0755 helloworld ${D}${bindir} | 24 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
24 | } | 25 | + |
25 | 26 | +SRC_URI = "file://helloworld.c" | |
26 | -BBCLASSEXTEND = "native nativesdk" | 27 | + |
27 | \ No newline at end of file | 28 | +S = "${WORKDIR}/sources" |
29 | +UNPACKDIR = "${S}" | ||
30 | + | ||
31 | +do_compile() { | ||
32 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld | ||
33 | +} | ||
34 | + | ||
35 | +do_install() { | ||
36 | + install -d ${D}${bindir} | ||
37 | + install -m 0755 helloworld ${D}${bindir} | ||
38 | +} | ||
39 | + | ||
28 | +BBCLASSEXTEND = "native nativesdk" | 40 | +BBCLASSEXTEND = "native nativesdk" |
29 | -- | 41 | -- |
30 | 2.41.0 | 42 | 2.45.1 |
31 | 43 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail index 854d7eb8c7..6facb8c756 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail | |||
@@ -1,26 +1,67 @@ | |||
1 | From f06e14633723c1e78bc7a4b0fd0d3b79d09f0c68 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: name@somedomain.com <email@address.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Thu, 2 May 2024 10:21:45 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] README.OE-Core.md: Add foo to header | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This test patch adds 'foo' to the header of README.OE-Core.md | 6 | This should fail the test_bugzilla_entry_format test. |
7 | 7 | ||
8 | [YOCTO 1234] | 8 | [YOCTO 1234] |
9 | CVE: CVE-1234-56789 | ||
9 | 10 | ||
10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 11 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
11 | --- | 12 | --- |
12 | README.OE-Core.md | 1 + | 13 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
13 | 1 file changed, 1 insertion(+) | 14 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
15 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
16 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
14 | 17 | ||
15 | diff --git a/README.OE-Core.md b/README.OE-Core.md | 18 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | index 687c58e410c..9d863891134 100644 | 19 | new file mode 100644 |
17 | --- a/README.OE-Core.md | 20 | index 00000000000..8a4f9329303 |
18 | +++ b/README.OE-Core.md | 21 | --- /dev/null |
19 | @@ -1,3 +1,4 @@ | 22 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | +**** FOO **** | 23 | @@ -0,0 +1,26 @@ |
21 | OpenEmbedded-Core | 24 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | ================= | 25 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
26 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
27 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
28 | + | ||
29 | +CVE: CVE-1234-56789 | ||
30 | +Upstream-Status: Backport(http://example.com/example) | ||
31 | + | ||
32 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
33 | +--- | ||
34 | + strlen.c | 1 + | ||
35 | + 1 file changed, 1 insertion(+) | ||
36 | + | ||
37 | +diff --git a/strlen.c b/strlen.c | ||
38 | +index 1788f38..83d7918 100644 | ||
39 | +--- a/strlen.c | ||
40 | ++++ b/strlen.c | ||
41 | + | ||
42 | +int main() { | ||
43 | + | ||
44 | + printf("%d\n", str_len(string1)); | ||
45 | + printf("%d\n", str_len(string2)); | ||
46 | + printf("CVE FIXED!!!\n"); | ||
47 | + | ||
48 | + return 0; | ||
49 | +} | ||
50 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
51 | index 2dc352d479e..d937759f157 100644 | ||
52 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
53 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
54 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
55 | LICENSE = "MIT" | ||
56 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
23 | 57 | ||
58 | -SRC_URI = "file://helloworld.c" | ||
59 | +SRC_URI = "file://helloworld.c \ | ||
60 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
61 | + " | ||
62 | |||
63 | S = "${WORKDIR}/sources" | ||
64 | UNPACKDIR = "${S}" | ||
24 | -- | 65 | -- |
25 | 2.44.0 | 66 | 2.45.1 |
26 | 67 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass index 2648b03364..2f35458b4f 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass | |||
@@ -1,25 +1,67 @@ | |||
1 | From fdfd605e565d874502522c4b70b786c8c5aa0bad Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: name@somedomain.com <email@address.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Fri, 17 Feb 2017 16:29:21 -0600 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] README: adds 'foo' to the header | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This test patch adds 'foo' to the header | 6 | This should pass the test_bugzilla_entry_format test. |
7 | 7 | ||
8 | [YOCTO #1234] | 8 | [YOCTO #1234] |
9 | CVE: CVE-1234-56789 | ||
9 | 10 | ||
10 | Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 11 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
11 | --- | 12 | --- |
12 | README | 1 + | 13 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
13 | 1 file changed, 1 insertion(+) | 14 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
15 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
16 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
14 | 17 | ||
15 | diff --git a/README b/README | 18 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | index 521916cd4f..cdf29dcea3 100644 | 19 | new file mode 100644 |
17 | --- a/README | 20 | index 00000000000..8a4f9329303 |
18 | +++ b/README | 21 | --- /dev/null |
19 | @@ -1,3 +1,4 @@ | 22 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | +**** FOO **** | 23 | @@ -0,0 +1,26 @@ |
21 | OpenEmbedded-Core | 24 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | ================= | 25 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
26 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
27 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
28 | + | ||
29 | +CVE: CVE-1234-56789 | ||
30 | +Upstream-Status: Backport(http://example.com/example) | ||
31 | + | ||
32 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
33 | +--- | ||
34 | + strlen.c | 1 + | ||
35 | + 1 file changed, 1 insertion(+) | ||
36 | + | ||
37 | +diff --git a/strlen.c b/strlen.c | ||
38 | +index 1788f38..83d7918 100644 | ||
39 | +--- a/strlen.c | ||
40 | ++++ b/strlen.c | ||
41 | + | ||
42 | +int main() { | ||
43 | + | ||
44 | + printf("%d\n", str_len(string1)); | ||
45 | + printf("%d\n", str_len(string2)); | ||
46 | + printf("CVE FIXED!!!\n"); | ||
47 | + | ||
48 | + return 0; | ||
49 | +} | ||
50 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
51 | index 2dc352d479e..d937759f157 100644 | ||
52 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
53 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
54 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
55 | LICENSE = "MIT" | ||
56 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
57 | |||
58 | -SRC_URI = "file://helloworld.c" | ||
59 | +SRC_URI = "file://helloworld.c \ | ||
60 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
61 | + " | ||
62 | |||
63 | S = "${WORKDIR}/sources" | ||
64 | UNPACKDIR = "${S}" | ||
65 | -- | ||
66 | 2.45.1 | ||
23 | 67 | ||
24 | -- | ||
25 | 2.11.0 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail index 93ca0f9119..6f4e61c0da 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail | |||
@@ -1,22 +1,62 @@ | |||
1 | From 0a52a62c9430c05d22cb7f46380488f2280b69bb Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Fri, 1 Sep 2023 08:56:14 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] README.OE-Core.md: add foo | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 6 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
7 | --- | 7 | --- |
8 | README.OE-Core.md | 1 + | 8 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
9 | 1 file changed, 1 insertion(+) | 9 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
10 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
11 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
10 | 12 | ||
11 | diff --git a/README.OE-Core.md b/README.OE-Core.md | 13 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
12 | index 2f2127fb03..48464252c8 100644 | 14 | new file mode 100644 |
13 | --- a/README.OE-Core.md | 15 | index 00000000000..8a4f9329303 |
14 | +++ b/README.OE-Core.md | 16 | --- /dev/null |
15 | @@ -1,3 +1,4 @@ | 17 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | +** FOO ** | 18 | @@ -0,0 +1,26 @@ |
17 | OpenEmbedded-Core | 19 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
18 | ================= | 20 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
21 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
22 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
23 | + | ||
24 | +CVE: CVE-1234-56789 | ||
25 | +Upstream-Status: Backport(http://example.com/example) | ||
26 | + | ||
27 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
28 | +--- | ||
29 | + strlen.c | 1 + | ||
30 | + 1 file changed, 1 insertion(+) | ||
31 | + | ||
32 | +diff --git a/strlen.c b/strlen.c | ||
33 | +index 1788f38..83d7918 100644 | ||
34 | +--- a/strlen.c | ||
35 | ++++ b/strlen.c | ||
36 | + | ||
37 | +int main() { | ||
38 | + | ||
39 | + printf("%d\n", str_len(string1)); | ||
40 | + printf("%d\n", str_len(string2)); | ||
41 | + printf("CVE FIXED!!!\n"); | ||
42 | + | ||
43 | + return 0; | ||
44 | +} | ||
45 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
46 | index 2dc352d479e..d937759f157 100644 | ||
47 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
48 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
49 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
50 | LICENSE = "MIT" | ||
51 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
19 | 52 | ||
53 | -SRC_URI = "file://helloworld.c" | ||
54 | +SRC_URI = "file://helloworld.c \ | ||
55 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
56 | + " | ||
57 | |||
58 | S = "${WORKDIR}/sources" | ||
59 | UNPACKDIR = "${S}" | ||
20 | -- | 60 | -- |
21 | 2.41.0 | 61 | 2.45.1 |
22 | 62 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass index 5e3dcbd58b..3fbc23fd00 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass | |||
@@ -1,24 +1,66 @@ | |||
1 | From 0a52a62c9430c05d22cb7f46380488f2280b69bb Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Fri, 1 Sep 2023 08:56:14 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] README.OE-Core.md: add foo | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This is a commit message | 6 | This should pass the test_commit_message_presence test. |
7 | |||
8 | CVE: CVE-1234-56789 | ||
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | README.OE-Core.md | 1 + | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | 1 file changed, 1 insertion(+) | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
14 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
12 | 16 | ||
13 | diff --git a/README.OE-Core.md b/README.OE-Core.md | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | index 2f2127fb03..48464252c8 100644 | 18 | new file mode 100644 |
15 | --- a/README.OE-Core.md | 19 | index 00000000000..8a4f9329303 |
16 | +++ b/README.OE-Core.md | 20 | --- /dev/null |
17 | @@ -1,3 +1,4 @@ | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
18 | +** FOO ** | 22 | @@ -0,0 +1,26 @@ |
19 | OpenEmbedded-Core | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
20 | ================= | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
26 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
27 | + | ||
28 | +CVE: CVE-1234-56789 | ||
29 | +Upstream-Status: Backport(http://example.com/example) | ||
30 | + | ||
31 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
32 | +--- | ||
33 | + strlen.c | 1 + | ||
34 | + 1 file changed, 1 insertion(+) | ||
35 | + | ||
36 | +diff --git a/strlen.c b/strlen.c | ||
37 | +index 1788f38..83d7918 100644 | ||
38 | +--- a/strlen.c | ||
39 | ++++ b/strlen.c | ||
40 | + | ||
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
50 | index 2dc352d479e..d937759f157 100644 | ||
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
54 | LICENSE = "MIT" | ||
55 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
56 | |||
57 | -SRC_URI = "file://helloworld.c" | ||
58 | +SRC_URI = "file://helloworld.c \ | ||
59 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
60 | + " | ||
21 | 61 | ||
62 | S = "${WORKDIR}/sources" | ||
63 | UNPACKDIR = "${S}" | ||
22 | -- | 64 | -- |
23 | 2.41.0 | 65 | 2.45.1 |
24 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail new file mode 100644 index 0000000000..9d54af9644 --- /dev/null +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail | |||
@@ -0,0 +1,65 @@ | |||
1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 | ||
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | ||
3 | Date: Fri, 31 May 2024 09:54:50 -0400 | ||
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | ||
5 | |||
6 | This should fail the test_commit_message_user_tags test because of this | ||
7 | string: @teststring | ||
8 | |||
9 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
10 | --- | ||
11 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ | ||
12 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- | ||
13 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
14 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
15 | |||
16 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
17 | new file mode 100644 | ||
18 | index 00000000000..8a4f9329303 | ||
19 | --- /dev/null | ||
20 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
21 | @@ -0,0 +1,26 @@ | ||
22 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | ||
23 | +From: Trevor Gamblin <tgamblin@baylibre.com> | ||
24 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
25 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
26 | + | ||
27 | +CVE: CVE-1234-56789 | ||
28 | +Upstream-Status: Backport(http://example.com/example) | ||
29 | + | ||
30 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
31 | +--- | ||
32 | + strlen.c | 1 + | ||
33 | + 1 file changed, 1 insertion(+) | ||
34 | + | ||
35 | +diff --git a/strlen.c b/strlen.c | ||
36 | +index 1788f38..83d7918 100644 | ||
37 | +--- a/strlen.c | ||
38 | ++++ b/strlen.c | ||
39 | + | ||
40 | +int main() { | ||
41 | + | ||
42 | + printf("%d\n", str_len(string1)); | ||
43 | + printf("%d\n", str_len(string2)); | ||
44 | + printf("CVE FIXED!!!\n"); | ||
45 | + | ||
46 | + return 0; | ||
47 | +} | ||
48 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
49 | index 2dc352d479e..d937759f157 100644 | ||
50 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
51 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
52 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
53 | LICENSE = "MIT" | ||
54 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
55 | |||
56 | -SRC_URI = "file://helloworld.c" | ||
57 | +SRC_URI = "file://helloworld.c \ | ||
58 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
59 | + " | ||
60 | |||
61 | S = "${WORKDIR}/sources" | ||
62 | UNPACKDIR = "${S}" | ||
63 | -- | ||
64 | 2.45.1 | ||
65 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass new file mode 100644 index 0000000000..57f2fc8a8e --- /dev/null +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass | |||
@@ -0,0 +1,66 @@ | |||
1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 | ||
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | ||
3 | Date: Fri, 31 May 2024 09:54:50 -0400 | ||
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | ||
5 | |||
6 | This should pass the test_commit_message_user_tags test. | ||
7 | |||
8 | CVE: CVE-1234-56789 | ||
9 | |||
10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
11 | --- | ||
12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ | ||
13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- | ||
14 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
16 | |||
17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
18 | new file mode 100644 | ||
19 | index 00000000000..8a4f9329303 | ||
20 | --- /dev/null | ||
21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
22 | @@ -0,0 +1,26 @@ | ||
23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | ||
24 | +From: Trevor Gamblin <tgamblin@baylibre.com> | ||
25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
26 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
27 | + | ||
28 | +CVE: CVE-1234-56789 | ||
29 | +Upstream-Status: Backport(http://example.com/example) | ||
30 | + | ||
31 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
32 | +--- | ||
33 | + strlen.c | 1 + | ||
34 | + 1 file changed, 1 insertion(+) | ||
35 | + | ||
36 | +diff --git a/strlen.c b/strlen.c | ||
37 | +index 1788f38..83d7918 100644 | ||
38 | +--- a/strlen.c | ||
39 | ++++ b/strlen.c | ||
40 | + | ||
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
50 | index 2dc352d479e..d937759f157 100644 | ||
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
54 | LICENSE = "MIT" | ||
55 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
56 | |||
57 | -SRC_URI = "file://helloworld.c" | ||
58 | +SRC_URI = "file://helloworld.c \ | ||
59 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
60 | + " | ||
61 | |||
62 | S = "${WORKDIR}/sources" | ||
63 | UNPACKDIR = "${S}" | ||
64 | -- | ||
65 | 2.45.1 | ||
66 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail deleted file mode 100644 index 9cc4aab38a..0000000000 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail +++ /dev/null | |||
@@ -1,36 +0,0 @@ | |||
1 | From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001 | ||
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | ||
3 | Date: Wed, 30 Aug 2023 12:15:00 -0400 | ||
4 | Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1 | ||
5 | |||
6 | This test should fail the mbox formatting test and the merge on head | ||
7 | test. | ||
8 | |||
9 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
10 | --- | ||
11 | .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++- | ||
12 | 1 file changed, 2 insertions(+), 1 deletion(-) | ||
13 | rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%) | ||
14 | |||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | ||
16 | similarity index 88% | ||
17 | rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
18 | rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | ||
19 | index 547587bef4..acc388ec2c 100644 | ||
20 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
21 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | ||
22 | @@ -1,3 +1,4 @@ | ||
23 | %+SUMMARY = "Hello!" | ||
24 | DESCRIPTION = "Simple helloworld application -- selftest variant" | ||
25 | SECTION = "examples" | ||
26 | LICENSE = "MIT" | ||
27 | @@ -16,4 +17,4 @@ do_install() { | ||
28 | install -m 0755 helloworld ${D}${bindir} | ||
29 | } | ||
30 | |||
31 | -BBCLASSEXTEND = "native nativesdk" | ||
32 | \ No newline at end of file | ||
33 | +BBCLASSEXTEND = "native nativesdk" | ||
34 | -- | ||
35 | 2.41.0 | ||
36 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail deleted file mode 100644 index eca1c60085..0000000000 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail +++ /dev/null | |||
@@ -1,35 +0,0 @@ | |||
1 | From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001 | ||
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | ||
3 | Date: Wed, 30 Aug 2023 12:15:00 -0400 | ||
4 | Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1 | ||
5 | |||
6 | This test should fail the merge-on-head and mbox formatting tests. | ||
7 | |||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
9 | --- | ||
10 | .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++- | ||
11 | 1 file changed, 2 insertions(+), 1 deletion(-) | ||
12 | rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%) | ||
13 | |||
14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | ||
15 | similarity index 88% | ||
16 | rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
17 | rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | ||
18 | index 547587bef4..acc388ec2c 100644 | ||
19 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
20 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | ||
21 | @@ -1,3 +1,4 @@ | ||
22 | %+SUMMARY = "Hello!" | ||
23 | DESCRIPTION = "Simple helloworld application -- selftest variant" | ||
24 | SECTION = "examples" | ||
25 | LICENSE = "MIT" | ||
26 | @@ -16,4 +17,4 @@ do_install() { | ||
27 | install -m 0755 helloworld ${D}${bindir} | ||
28 | } | ||
29 | |||
30 | -BBCLASSEXTEND = "native nativesdk" | ||
31 | \ No newline at end of file | ||
32 | +BBCLASSEXTEND = "native nativesdk" | ||
33 | -- | ||
34 | 2.41.0 | ||
35 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail new file mode 100644 index 0000000000..0dda6802d1 --- /dev/null +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail | |||
@@ -0,0 +1,66 @@ | |||
1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 | ||
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | ||
3 | Date: Fri, 31 May 2024 09:54:50 -0400 | ||
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | ||
5 | |||
6 | This should fail the test_mbox_format test. | ||
7 | |||
8 | CVE: CVE-1234-56789 | ||
9 | |||
10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
11 | --- | ||
12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ | ||
13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- | ||
14 | 2 files changed, 29 insertions(+), 1 deletion(-) | ||
15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
16 | |||
17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
18 | new file mode 100644 | ||
19 | index 00000000000..8a4f9329303 | ||
20 | --- /dev/null | ||
21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
22 | @@ -0,0 +1,26 @@ | ||
23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | ||
24 | +From: Trevor Gamblin <tgamblin@baylibre.com> | ||
25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
26 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
27 | + | ||
28 | +CVE: CVE-1234-56789 | ||
29 | +Upstream-Status: Backport(http://example.com/example) | ||
30 | + | ||
31 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
32 | +--- | ||
33 | + strlen.c | 1 + | ||
34 | + 1 file changed, 1 insertion(+) | ||
35 | + | ||
36 | +diff --git a/strlen.c b/strlen.c | ||
37 | +index 1788f38..83d7918 100644 | ||
38 | +--- a/strlen.c | ||
39 | ++++ b/strlen.c | ||
40 | + | ||
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
50 | index 2dc352d479e..d937759f157 100644 | ||
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | ||
54 | LICENSE = "MIT" | ||
55 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
56 | |||
57 | -SRC_URI = "file://helloworld.c" | ||
58 | +SRC_URI = "file://helloworld.c \ | ||
59 | %+ file://0001-Fix-CVE-1234-56789.patch \ | ||
60 | + " | ||
61 | |||
62 | S = "${WORKDIR}/sources" | ||
63 | UNPACKDIR = "${S}" | ||
64 | -- | ||
65 | 2.45.1 | ||
66 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass index 33940adffc..f06ae11d04 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass | |||
@@ -1,33 +1,66 @@ | |||
1 | From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Wed, 30 Aug 2023 12:15:00 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | |||
6 | This should pass the test_mbox_format test. | ||
7 | |||
8 | CVE: CVE-1234-56789 | ||
5 | 9 | ||
6 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
7 | --- | 11 | --- |
8 | .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++- | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
9 | 1 file changed, 2 insertions(+), 1 deletion(-) | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
10 | rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | ||
11 | 16 | ||
12 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
13 | similarity index 88% | 18 | new file mode 100644 |
14 | rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 19 | index 00000000000..8a4f9329303 |
15 | rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | 20 | --- /dev/null |
16 | index 547587bef4..acc388ec2c 100644 | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
22 | @@ -0,0 +1,26 @@ | ||
23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | ||
24 | +From: Trevor Gamblin <tgamblin@baylibre.com> | ||
25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | ||
26 | +Subject: [PATCH] Fix CVE-NOT-REAL | ||
27 | + | ||
28 | +CVE: CVE-1234-56789 | ||
29 | +Upstream-Status: Backport(http://example.com/example) | ||
30 | + | ||
31 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | ||
32 | +--- | ||
33 | + strlen.c | 1 + | ||
34 | + 1 file changed, 1 insertion(+) | ||
35 | + | ||
36 | +diff --git a/strlen.c b/strlen.c | ||
37 | +index 1788f38..83d7918 100644 | ||
38 | +--- a/strlen.c | ||
39 | ++++ b/strlen.c | ||
40 | + | ||
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | ||
50 | index 2dc352d479e..d937759f157 100644 | ||
17 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
19 | @@ -1,3 +1,4 @@ | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
20 | +SUMMARY = "Hello!" | ||
21 | DESCRIPTION = "Simple helloworld application -- selftest variant" | ||
22 | SECTION = "examples" | ||
23 | LICENSE = "MIT" | 54 | LICENSE = "MIT" |
24 | @@ -16,4 +17,4 @@ do_install() { | 55 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
25 | install -m 0755 helloworld ${D}${bindir} | 56 | |
26 | } | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | ||
59 | + file://0001-Fix-CVE-1234-56789.patch \ | ||
60 | + " | ||
27 | 61 | ||
28 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
29 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
30 | +BBCLASSEXTEND = "native nativesdk" | ||
31 | -- | 64 | -- |
32 | 2.41.0 | 65 | 2.45.1 |
33 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip index 2a72457878..072ccc28c0 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip | |||
@@ -3,7 +3,7 @@ From: Trevor Gamblin <tgamblin@baylibre.com> | |||
3 | Date: Wed, 30 Aug 2023 12:15:00 -0400 | 3 | Date: Wed, 30 Aug 2023 12:15:00 -0400 |
4 | Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1 | 4 | Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1 |
5 | 5 | ||
6 | This file should pass the test_series_merge_on_head test. | 6 | This file should skip the test_series_merge_on_head test. |
7 | 7 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail index cdbbc61b61..c5e4df2549 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail | |||
@@ -1,23 +1,25 @@ | |||
1 | From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello% fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello% fix CVE-1234-56789 |
5 | 5 | ||
6 | This should fail the test_shortlog_format test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..9219b8db62 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | ||
39 | + | ||
40 | + printf("%d\n", str_len(string1)); | ||
41 | + printf("%d\n", str_len(string2)); | ||
42 | ++ printf("CVE FIXED!!!\n"); | ||
43 | + | ||
44 | + return 0; | ||
45 | + } | ||
46 | +-- | ||
47 | +2.41.0 | ||
48 | + | 40 | + |
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
56 | 56 | ||
57 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
59 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
60 | + " | 60 | + " |
61 | |||
62 | S = "${WORKDIR}" | ||
63 | |||
64 | @@ -16,4 +18,4 @@ do_install() { | ||
65 | install -m 0755 helloworld ${D}${bindir} | ||
66 | } | ||
67 | 61 | ||
68 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
69 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
70 | +BBCLASSEXTEND = "native nativesdk" | ||
71 | -- | 64 | -- |
72 | 2.41.0 | 65 | 2.45.1 |
73 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass index ef6017037c..4948e26afc 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass | |||
@@ -1,23 +1,25 @@ | |||
1 | From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should pass the test_shortlog_format test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..9219b8db62 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | ||
39 | + | ||
40 | + printf("%d\n", str_len(string1)); | ||
41 | + printf("%d\n", str_len(string2)); | ||
42 | ++ printf("CVE FIXED!!!\n"); | ||
43 | + | ||
44 | + return 0; | ||
45 | + } | ||
46 | +-- | ||
47 | +2.41.0 | ||
48 | + | 40 | + |
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
56 | 56 | ||
57 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
59 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
60 | + " | 60 | + " |
61 | |||
62 | S = "${WORKDIR}" | ||
63 | |||
64 | @@ -16,4 +18,4 @@ do_install() { | ||
65 | install -m 0755 helloworld ${D}${bindir} | ||
66 | } | ||
67 | 61 | ||
68 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
69 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
70 | +BBCLASSEXTEND = "native nativesdk" | ||
71 | -- | 64 | -- |
72 | 2.41.0 | 65 | 2.45.1 |
73 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail index 629e78540b..4ed1242821 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail | |||
@@ -1,23 +1,25 @@ | |||
1 | From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: this is a very long commit shortlog with way too many words included in it to pass the test | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 this is a very long commit shortlog with way too many words included in it to pass the test |
5 | |||
6 | This should fail the test_shortlog_length test. | ||
5 | 7 | ||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..9219b8db62 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | ||
39 | + | ||
40 | + printf("%d\n", str_len(string1)); | ||
41 | + printf("%d\n", str_len(string2)); | ||
42 | ++ printf("CVE FIXED!!!\n"); | ||
43 | + | ||
44 | + return 0; | ||
45 | + } | ||
46 | +-- | ||
47 | +2.41.0 | ||
48 | + | 40 | + |
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -57,17 +57,10 @@ index 547587bef4..76975a6729 100644 | |||
57 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
59 | + file://0001-Fix-CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
60 | + " | 60 | + " |
61 | |||
62 | S = "${WORKDIR}" | ||
63 | |||
64 | @@ -16,4 +18,4 @@ do_install() { | ||
65 | install -m 0755 helloworld ${D}${bindir} | ||
66 | } | ||
67 | 61 | ||
68 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
69 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
70 | +BBCLASSEXTEND = "native nativesdk" | ||
71 | -- | 64 | -- |
72 | 2.41.0 | 65 | 2.45.1 |
73 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass index ef6017037c..ef5066a650 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass | |||
@@ -1,23 +1,25 @@ | |||
1 | From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should pass the test_shortlog_length test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..9219b8db62 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | ||
39 | + | ||
40 | + printf("%d\n", str_len(string1)); | ||
41 | + printf("%d\n", str_len(string2)); | ||
42 | ++ printf("CVE FIXED!!!\n"); | ||
43 | + | ||
44 | + return 0; | ||
45 | + } | ||
46 | +-- | ||
47 | +2.41.0 | ||
48 | + | 40 | + |
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
56 | 56 | ||
57 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
59 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
60 | + " | 60 | + " |
61 | |||
62 | S = "${WORKDIR}" | ||
63 | |||
64 | @@ -16,4 +18,4 @@ do_install() { | ||
65 | install -m 0755 helloworld ${D}${bindir} | ||
66 | } | ||
67 | 61 | ||
68 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
69 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
70 | +BBCLASSEXTEND = "native nativesdk" | ||
71 | -- | 64 | -- |
72 | 2.41.0 | 65 | 2.45.1 |
73 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail index 35d92aeed7..4ede7271ee 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail | |||
@@ -1,22 +1,24 @@ | |||
1 | From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should fail the test_signed_off_by_presence test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | --- | 10 | --- |
9 | .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ | 11 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
10 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 12 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
11 | 2 files changed, 31 insertions(+), 2 deletions(-) | 13 | 2 files changed, 29 insertions(+), 1 deletion(-) |
12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 14 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
13 | 15 | ||
14 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 16 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
15 | new file mode 100644 | 17 | new file mode 100644 |
16 | index 0000000000..869cfb6fe5 | 18 | index 00000000000..8a4f9329303 |
17 | --- /dev/null | 19 | --- /dev/null |
18 | +++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 20 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
19 | @@ -0,0 +1,27 @@ | 21 | @@ -0,0 +1,26 @@ |
20 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 22 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
21 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 23 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
22 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 24 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -34,18 +36,17 @@ index 0000000000..869cfb6fe5 | |||
34 | +index 1788f38..83d7918 100644 | 36 | +index 1788f38..83d7918 100644 |
35 | +--- a/strlen.c | 37 | +--- a/strlen.c |
36 | ++++ b/strlen.c | 38 | ++++ b/strlen.c |
37 | +@@ -8,6 +8,7 @@ int main() { | 39 | + |
38 | + | 40 | +int main() { |
39 | + printf("%d\n", str_len(string1)); | 41 | + |
40 | + printf("%d\n", str_len(string2)); | 42 | + printf("%d\n", str_len(string1)); |
41 | ++ printf("CVE FIXED!!!\n"); | 43 | + printf("%d\n", str_len(string2)); |
42 | + | 44 | + printf("CVE FIXED!!!\n"); |
43 | + return 0; | 45 | + |
44 | + } | 46 | + return 0; |
45 | +-- | 47 | +} |
46 | +2.41.0 | ||
47 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 48 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
48 | index 547587bef4..76975a6729 100644 | 49 | index 2dc352d479e..d937759f157 100644 |
49 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 50 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
51 | @@ -3,7 +3,9 @@ SECTION = "examples" | 52 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -54,18 +55,11 @@ index 547587bef4..76975a6729 100644 | |||
54 | 55 | ||
55 | -SRC_URI = "file://helloworld.c" | 56 | -SRC_URI = "file://helloworld.c" |
56 | +SRC_URI = "file://helloworld.c \ | 57 | +SRC_URI = "file://helloworld.c \ |
57 | + file://CVE-1234-56789.patch \ | 58 | + file://0001-Fix-CVE-1234-56789.patch \ |
58 | + " | 59 | + " |
59 | |||
60 | S = "${WORKDIR}" | ||
61 | |||
62 | @@ -16,4 +18,4 @@ do_install() { | ||
63 | install -m 0755 helloworld ${D}${bindir} | ||
64 | } | ||
65 | 60 | ||
66 | -BBCLASSEXTEND = "native nativesdk" | 61 | S = "${WORKDIR}/sources" |
67 | \ No newline at end of file | 62 | UNPACKDIR = "${S}" |
68 | +BBCLASSEXTEND = "native nativesdk" | ||
69 | -- | 63 | -- |
70 | 2.41.0 | 64 | 2.45.1 |
71 | 65 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail index 68f38dee06..f7c3f5145a 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail | |||
@@ -1,23 +1,25 @@ | |||
1 | From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should fail the test_signed_off_by_presence test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Approved: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Approved-of-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..869cfb6fe5 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,18 +37,17 @@ index 0000000000..869cfb6fe5 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | 40 | + |
39 | + | 41 | +int main() { |
40 | + printf("%d\n", str_len(string1)); | 42 | + |
41 | + printf("%d\n", str_len(string2)); | 43 | + printf("%d\n", str_len(string1)); |
42 | ++ printf("CVE FIXED!!!\n"); | 44 | + printf("%d\n", str_len(string2)); |
43 | + | 45 | + printf("CVE FIXED!!!\n"); |
44 | + return 0; | 46 | + |
45 | + } | 47 | + return 0; |
46 | +-- | 48 | +} |
47 | +2.41.0 | ||
48 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
49 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
50 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
51 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -55,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
55 | 56 | ||
56 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
57 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
58 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
59 | + " | 60 | + " |
60 | |||
61 | S = "${WORKDIR}" | ||
62 | |||
63 | @@ -16,4 +18,4 @@ do_install() { | ||
64 | install -m 0755 helloworld ${D}${bindir} | ||
65 | } | ||
66 | 61 | ||
67 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
68 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
69 | +BBCLASSEXTEND = "native nativesdk" | ||
70 | -- | 64 | -- |
71 | 2.41.0 | 65 | 2.45.1 |
72 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass index ea34c76f0d..2661c1416f 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass | |||
@@ -1,23 +1,25 @@ | |||
1 | From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should pass the test_signed_off_by_presence test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..869cfb6fe5 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,18 +37,17 @@ index 0000000000..869cfb6fe5 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | 40 | + |
39 | + | 41 | +int main() { |
40 | + printf("%d\n", str_len(string1)); | 42 | + |
41 | + printf("%d\n", str_len(string2)); | 43 | + printf("%d\n", str_len(string1)); |
42 | ++ printf("CVE FIXED!!!\n"); | 44 | + printf("%d\n", str_len(string2)); |
43 | + | 45 | + printf("CVE FIXED!!!\n"); |
44 | + return 0; | 46 | + |
45 | + } | 47 | + return 0; |
46 | +-- | 48 | +} |
47 | +2.41.0 | ||
48 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
49 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
50 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
51 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -55,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
55 | 56 | ||
56 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
57 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
58 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
59 | + " | 60 | + " |
60 | |||
61 | S = "${WORKDIR}" | ||
62 | |||
63 | @@ -16,4 +18,4 @@ do_install() { | ||
64 | install -m 0755 helloworld ${D}${bindir} | ||
65 | } | ||
66 | 61 | ||
67 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
68 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
69 | +BBCLASSEXTEND = "native nativesdk" | ||
70 | -- | 64 | -- |
71 | 2.41.0 | 65 | 2.45.1 |
72 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail index 3574463ade..dccafcd9bc 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail | |||
@@ -1,30 +1,25 @@ | |||
1 | From c4ca86b9cca3643097db0328e2f34dccffbba309 Mon Sep 17 00:00:00 2001 | 1 | From 60450eefbc2c438a37c5e08759d021b18f0df0a3 Mon Sep 17 00:00:00 2001 |
2 | From: =?UTF-8?q?Simone=20Wei=C3=9F?= <simone.p.weiss@posteo.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Sat, 10 Feb 2024 13:18:44 +0100 | 3 | Date: Fri, 31 May 2024 09:18:17 -0400 |
4 | Subject: [PATCH] selftest-hello: add CVE_CHECK_IGNORE | 4 | Subject: [PATCH] selftest-hello: add CVE_CHECK_IGNORE |
5 | MIME-Version: 1.0 | ||
6 | Content-Type: text/plain; charset=UTF-8 | ||
7 | Content-Transfer-Encoding: 8bit | ||
8 | 5 | ||
9 | This should fail the test_cve_check_ignore selftest. | 6 | This should fail the test_cve_check_ignore selftest.
10 | 7 | ||
11 | Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
12 | --- | 9 | --- |
13 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- | 10 | meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 1 + |
14 | 1 file changed, 2 insertions(+), 1 deletion(-) | 11 | 1 file changed, 1 insertion(+) |
15 | 12 | ||
16 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
17 | index 547587bef4..3ef9b87c34 100644 | 14 | index 2dc352d479e..cc103de6e2e 100644 |
18 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
20 | @@ -16,4 +16,5 @@ do_install() { | 17 | @@ -17,4 +17,5 @@ do_install() { |
21 | install -m 0755 helloworld ${D}${bindir} | 18 | install -m 0755 helloworld ${D}${bindir} |
22 | } | 19 | } |
23 | 20 | ||
24 | -BBCLASSEXTEND = "native nativesdk" | ||
25 | \ No newline at end of file | ||
26 | +CVE_CHECK_IGNORE = "CVE-2024-12345" | 21 | +CVE_CHECK_IGNORE = "CVE-2024-12345" |
27 | +BBCLASSEXTEND = "native nativesdk" | 22 | BBCLASSEXTEND = "native nativesdk" |
28 | -- | 23 | -- |
29 | 2.39.2 | 24 | 2.45.1 |
30 | 25 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass index 10f942a6eb..93a6cc91fb 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass | |||
@@ -1,31 +1,25 @@ | |||
1 | From 7d4d3fee0c7111830ee9b2b049ae3ce265b26030 Mon Sep 17 00:00:00 2001 | 1 | From f91073242268d2b2c1a1a705e7fd585679f78a59 Mon Sep 17 00:00:00 2001 |
2 | From: =?UTF-8?q?Simone=20Wei=C3=9F?= <simone.p.weiss@posteo.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Sat, 10 Feb 2024 13:23:56 +0100 | 3 | Date: Fri, 31 May 2024 09:18:17 -0400 |
4 | Subject: [PATCH] selftest-hello: add CVE_STATUS | 4 | Subject: [PATCH] selftest-hello: add CVE_STATUS |
5 | MIME-Version: 1.0 | ||
6 | Content-Type: text/plain; charset=UTF-8 | ||
7 | Content-Transfer-Encoding: 8bit | ||
8 | 5 | ||
9 | This should pass the test_cve_check_ignore selftest. | 6 | This should pass the test_cve_check_ignore selftest.
10 | 7 | ||
11 | Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
12 | --- | 9 | --- |
13 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +++- | 10 | meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 1 + |
14 | 1 file changed, 3 insertions(+), 1 deletion(-) | 11 | 1 file changed, 1 insertion(+) |
15 | 12 | ||
16 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
17 | index 547587bef4..9908b3b417 100644 | 14 | index 2dc352d479e..88c5c98608f 100644 |
18 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
20 | @@ -16,4 +16,6 @@ do_install() { | 17 | @@ -17,4 +17,5 @@ do_install() { |
21 | install -m 0755 helloworld ${D}${bindir} | 18 | install -m 0755 helloworld ${D}${bindir} |
22 | } | 19 | } |
23 | 20 | ||
24 | -BBCLASSEXTEND = "native nativesdk" | ||
25 | \ No newline at end of file | ||
26 | +CVE_STATUS[CVE-2024-12345] = "not-applicable-platform: Issue only applies on Windows" | 21 | +CVE_STATUS[CVE-2024-12345] = "not-applicable-platform: Issue only applies on Windows" |
27 | + | 22 | BBCLASSEXTEND = "native nativesdk" |
28 | +BBCLASSEXTEND = "native nativesdk" | ||
29 | -- | 23 | -- |
30 | 2.39.2 | 24 | 2.45.1 |
31 | 25 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail index ab6c52c374..61b3784e3c 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail | |||
@@ -1,19 +1,17 @@ | |||
1 | From f89919ea86d38404dd621521680a0162367bb965 Mon Sep 17 00:00:00 2001 | 1 | From 974c3a143bc67faaff9abcc0a06a3d5e692fc660 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Wed, 6 Sep 2023 09:09:27 -0400 | 3 | Date: Fri, 31 May 2024 11:51:15 -0400 |
4 | Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM | 4 | Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM |
5 | 5 | ||
6 | This test should fail the | 6 | This should fail the test_lic_files_chksum_modified_not_mentioned test. |
7 | test_metadata_lic_files_chksum.LicFilesChkSum.test_lic_files_chksum_modified_not_mentioned | ||
8 | test. | ||
9 | 7 | ||
10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
11 | --- | 9 | --- |
12 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 ++-- | 10 | meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 +- |
13 | 1 file changed, 2 insertions(+), 2 deletions(-) | 11 | 1 file changed, 1 insertion(+), 1 deletion(-) |
14 | 12 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
16 | index 547587bef4..65dda40aba 100644 | 14 | index 2dc352d479e..356921db1dd 100644 |
17 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
19 | @@ -1,7 +1,7 @@ | 17 | @@ -1,7 +1,7 @@ |
@@ -25,13 +23,6 @@ index 547587bef4..65dda40aba 100644 | |||
25 | 23 | ||
26 | SRC_URI = "file://helloworld.c" | 24 | SRC_URI = "file://helloworld.c" |
27 | 25 | ||
28 | @@ -16,4 +16,4 @@ do_install() { | ||
29 | install -m 0755 helloworld ${D}${bindir} | ||
30 | } | ||
31 | |||
32 | -BBCLASSEXTEND = "native nativesdk" | ||
33 | \ No newline at end of file | ||
34 | +BBCLASSEXTEND = "native nativesdk" | ||
35 | -- | 26 | -- |
36 | 2.41.0 | 27 | 2.45.1 |
37 | 28 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass index 99d9f144da..b7be1e8e55 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass | |||
@@ -1,21 +1,19 @@ | |||
1 | From f89919ea86d38404dd621521680a0162367bb965 Mon Sep 17 00:00:00 2001 | 1 | From 974c3a143bc67faaff9abcc0a06a3d5e692fc660 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Wed, 6 Sep 2023 09:09:27 -0400 | 3 | Date: Fri, 31 May 2024 11:51:15 -0400 |
4 | Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM | 4 | Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM |
5 | 5 | ||
6 | License-Update: Fix checksum | 6 | This should pass the test_lic_files_chksum_modified_not_mentioned test. |
7 | 7 | ||
8 | This test should pass the | 8 | License-Update: Stuff happened! |
9 | test_metadata_lic_files_chksum.LicFilesChkSum.test_lic_files_chksum_modified_not_mentioned | ||
10 | test. | ||
11 | 9 | ||
12 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
13 | --- | 11 | --- |
14 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 ++-- | 12 | meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 +- |
15 | 1 file changed, 2 insertions(+), 2 deletions(-) | 13 | 1 file changed, 1 insertion(+), 1 deletion(-) |
16 | 14 | ||
17 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 15 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
18 | index 547587bef4..65dda40aba 100644 | 16 | index 2dc352d479e..356921db1dd 100644 |
19 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 17 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
20 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
21 | @@ -1,7 +1,7 @@ | 19 | @@ -1,7 +1,7 @@ |
@@ -27,13 +25,6 @@ index 547587bef4..65dda40aba 100644 | |||
27 | 25 | ||
28 | SRC_URI = "file://helloworld.c" | 26 | SRC_URI = "file://helloworld.c" |
29 | 27 | ||
30 | @@ -16,4 +16,4 @@ do_install() { | ||
31 | install -m 0755 helloworld ${D}${bindir} | ||
32 | } | ||
33 | |||
34 | -BBCLASSEXTEND = "native nativesdk" | ||
35 | \ No newline at end of file | ||
36 | +BBCLASSEXTEND = "native nativesdk" | ||
37 | -- | 28 | -- |
38 | 2.41.0 | 29 | 2.45.1 |
39 | 30 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail index e14d644bb2..a7a0b0bacb 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail | |||
@@ -1,53 +1,42 @@ | |||
1 | From 66430e7c6fbd5187b66560909a510e136fed91c0 Mon Sep 17 00:00:00 2001 | 1 | From 74bc209a4fbe4da2f57e153ccfff3d2241dada8d Mon Sep 17 00:00:00 2001 |
2 | From: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Thu, 23 Feb 2017 10:34:27 -0600 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] meta: adding hello-yocto recipe | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This is a sample recipe | 6 | This should fail the test_lic_files_chksum_presence test. |
7 | 7 | ||
8 | Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../hello-world/hello-world/hello_world.c | 5 +++++ | 10 | .../selftest-hello-extra_1.0.bb | 20 +++++++++++++++++++ |
11 | .../hello-world/hello-world_1.0.bb | 18 ++++++++++++++++++ | 11 | 1 file changed, 20 insertions(+) |
12 | 2 files changed, 23 insertions(+) | 12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
13 | create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c | ||
14 | create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
15 | 13 | ||
16 | diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
17 | new file mode 100644 | 15 | new file mode 100644 |
18 | index 0000000000..0d59f57d4c | 16 | index 00000000000..875bcbef859 |
19 | --- /dev/null | 17 | --- /dev/null |
20 | +++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
21 | @@ -0,0 +1,5 @@ | 19 | @@ -0,0 +1,20 @@ |
22 | +#include <stdio.h> | 20 | +SUMMARY = "This is an example summary" |
23 | + | 21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
24 | +int main(){ | 22 | +SECTION = "examples" |
25 | + printf("Hello World\n"); | ||
26 | +} | ||
27 | diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
28 | new file mode 100644 | ||
29 | index 0000000000..3c990c108a | ||
30 | --- /dev/null | ||
31 | +++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
32 | @@ -0,0 +1,18 @@ | ||
33 | +SUMMARY = "This is a sample summary" | ||
34 | +DESCRIPTION = "This is a sample description" | ||
35 | +HOMEPAGE = "https://sample.com/this-is-a-sample" | ||
36 | +LICENSE = "MIT" | 23 | +LICENSE = "MIT" |
37 | + | 24 | + |
38 | +SRC_URI += "file://hello_world.c" | 25 | +SRC_URI = "file://helloworld.c" |
39 | + | 26 | + |
40 | +SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632" | 27 | +S = "${WORKDIR}/sources" |
41 | +SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e" | 28 | +UNPACKDIR = "${S}" |
42 | + | 29 | + |
43 | +do_compile(){ | 30 | +do_compile() { |
44 | + ${CC} -o hello_world ../hello_world.c | 31 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld |
45 | +} | 32 | +} |
46 | + | 33 | + |
47 | +do_install(){ | 34 | +do_install() { |
48 | + install -d ${D}${bindir} | 35 | + install -d ${D}${bindir} |
49 | + install -m +x hello_world ${D}${bindir}/hello_world | 36 | + install -m 0755 helloworld ${D}${bindir} |
50 | +} | 37 | +} |
38 | + | ||
39 | +BBCLASSEXTEND = "native nativesdk" | ||
51 | -- | 40 | -- |
52 | 2.41.0 | 41 | 2.45.1 |
53 | 42 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass index b8da16dfe5..8ffa97ec56 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass | |||
@@ -1,54 +1,43 @@ | |||
1 | From 5144d2ba1aa763312c047dd5f8901368cff79da6 Mon Sep 17 00:00:00 2001 | 1 | From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001 |
2 | From: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Thu, 23 Feb 2017 10:34:27 -0600 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] meta: adding hello-yocto recipe | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This is a sample recipe | 6 | This should pass the test_lic_files_chksum_presence test. |
7 | 7 | ||
8 | Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../hello-world/hello-world/hello_world.c | 5 +++++ | 10 | .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++ |
11 | .../hello-world/hello-world_1.0.bb | 19 +++++++++++++++++++ | 11 | 1 file changed, 21 insertions(+) |
12 | 2 files changed, 24 insertions(+) | 12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
13 | create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c | ||
14 | create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
15 | 13 | ||
16 | diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
17 | new file mode 100644 | 15 | new file mode 100644 |
18 | index 0000000000..0d59f57d4c | 16 | index 00000000000..f3dec1b220c |
19 | --- /dev/null | 17 | --- /dev/null |
20 | +++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
21 | @@ -0,0 +1,5 @@ | 19 | @@ -0,0 +1,21 @@ |
22 | +#include <stdio.h> | 20 | +SUMMARY = "This is an example summary" |
23 | + | 21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
24 | +int main(){ | 22 | +SECTION = "examples" |
25 | + printf("Hello World\n"); | ||
26 | +} | ||
27 | diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
28 | new file mode 100644 | ||
29 | index 0000000000..44d888c82a | ||
30 | --- /dev/null | ||
31 | +++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
32 | @@ -0,0 +1,19 @@ | ||
33 | +SUMMARY = "This is a sample summary" | ||
34 | +DESCRIPTION = "This is a sample description" | ||
35 | +HOMEPAGE = "https://sample.com/this-is-a-sample" | ||
36 | +LICENSE = "MIT" | 23 | +LICENSE = "MIT" |
37 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | 24 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
38 | + | 25 | + |
39 | +SRC_URI += "file://hello_world.c" | 26 | +SRC_URI = "file://helloworld.c" |
40 | + | 27 | + |
41 | +SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632" | 28 | +S = "${WORKDIR}/sources" |
42 | +SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e" | 29 | +UNPACKDIR = "${S}" |
43 | + | 30 | + |
44 | +do_compile(){ | 31 | +do_compile() { |
45 | + ${CC} -o hello_world ../hello_world.c | 32 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld |
46 | +} | 33 | +} |
47 | + | 34 | + |
48 | +do_install(){ | 35 | +do_install() { |
49 | + install -d ${D}${bindir} | 36 | + install -d ${D}${bindir} |
50 | + install -m +x hello_world ${D}${bindir}/hello_world | 37 | + install -m 0755 helloworld ${D}${bindir} |
51 | +} | 38 | +} |
39 | + | ||
40 | +BBCLASSEXTEND = "native nativesdk" | ||
52 | -- | 41 | -- |
53 | 2.41.0 | 42 | 2.45.1 |
54 | 43 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail index 983b6e0c2b..0a402d0a3e 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail | |||
@@ -1,17 +1,17 @@ | |||
1 | From 4ab06b5f81455249cd5e89d2cce9863803b5ecb5 Mon Sep 17 00:00:00 2001 | 1 | From f2f7b6bcb831289bc3ba2343ad7dc5bee6d6e0cd Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Fri, 8 Sep 2023 14:41:00 -0400 | 3 | Date: Fri, 31 May 2024 08:45:41 -0400 |
4 | Subject: [PATCH] selftest-hello: remove helloworld.c | 4 | Subject: [PATCH] selftest-hello: remove helloworld.c |
5 | 5 | ||
6 | This should fail the test_src_uri_left_files selftest. | 6 | This should fail the test_src_uri_left_files selftest. |
7 | 7 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +--- | 10 | meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 -- |
11 | 1 file changed, 1 insertion(+), 3 deletions(-) | 11 | 1 file changed, 2 deletions(-) |
12 | 12 | ||
13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 13 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
14 | index 547587bef4..f6817f05bc 100644 | 14 | index 2dc352d479e..e95270adaeb 100644 |
15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 15 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 16 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
17 | @@ -3,8 +3,6 @@ SECTION = "examples" | 17 | @@ -3,8 +3,6 @@ SECTION = "examples" |
@@ -20,16 +20,9 @@ index 547587bef4..f6817f05bc 100644 | |||
20 | 20 | ||
21 | -SRC_URI = "file://helloworld.c" | 21 | -SRC_URI = "file://helloworld.c" |
22 | - | 22 | - |
23 | S = "${WORKDIR}" | 23 | S = "${WORKDIR}/sources" |
24 | UNPACKDIR = "${S}" | ||
24 | 25 | ||
25 | do_compile() { | ||
26 | @@ -16,4 +14,4 @@ do_install() { | ||
27 | install -m 0755 helloworld ${D}${bindir} | ||
28 | } | ||
29 | |||
30 | -BBCLASSEXTEND = "native nativesdk" | ||
31 | \ No newline at end of file | ||
32 | +BBCLASSEXTEND = "native nativesdk" | ||
33 | -- | 26 | -- |
34 | 2.41.0 | 27 | 2.45.1 |
35 | 28 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass index 1f1a77e581..a675c028d0 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass | |||
@@ -1,6 +1,6 @@ | |||
1 | From 6c7ac367a873bf827c19b81085c943eace917a99 Mon Sep 17 00:00:00 2001 | 1 | From e79933e2fc68570066eca66f0b599d259b7a1731 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Fri, 8 Sep 2023 14:41:00 -0400 | 3 | Date: Fri, 31 May 2024 08:18:48 -0400 |
4 | Subject: [PATCH] selftest-hello: remove helloworld.c | 4 | Subject: [PATCH] selftest-hello: remove helloworld.c |
5 | 5 | ||
6 | This should pass the test_src_uri_left_files selftest. | 6 | This should pass the test_src_uri_left_files selftest. |
@@ -8,13 +8,13 @@ This should pass the test_src_uri_left_files selftest. | |||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../recipes-test/selftest-hello/files/helloworld.c | 8 -------- | 10 | .../recipes-test/selftest-hello/files/helloworld.c | 8 -------- |
11 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +--- | 11 | .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 -- |
12 | 2 files changed, 1 insertion(+), 11 deletions(-) | 12 | 2 files changed, 10 deletions(-) |
13 | delete mode 100644 meta-selftest/recipes-test/selftest-hello/files/helloworld.c | 13 | delete mode 100644 meta-selftest/recipes-test/selftest-hello/files/helloworld.c |
14 | 14 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c | 15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c |
16 | deleted file mode 100644 | 16 | deleted file mode 100644 |
17 | index fc7169b7b8..0000000000 | 17 | index fc7169b7b83..00000000000 |
18 | --- a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c | 18 | --- a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c |
19 | +++ /dev/null | 19 | +++ /dev/null |
20 | @@ -1,8 +0,0 @@ | 20 | @@ -1,8 +0,0 @@ |
@@ -27,7 +27,7 @@ index fc7169b7b8..0000000000 | |||
27 | - return 0; | 27 | - return 0; |
28 | -} | 28 | -} |
29 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 29 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
30 | index 547587bef4..f6817f05bc 100644 | 30 | index 2dc352d479e..e95270adaeb 100644 |
31 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 31 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
32 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 32 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
33 | @@ -3,8 +3,6 @@ SECTION = "examples" | 33 | @@ -3,8 +3,6 @@ SECTION = "examples" |
@@ -36,16 +36,9 @@ index 547587bef4..f6817f05bc 100644 | |||
36 | 36 | ||
37 | -SRC_URI = "file://helloworld.c" | 37 | -SRC_URI = "file://helloworld.c" |
38 | - | 38 | - |
39 | S = "${WORKDIR}" | 39 | S = "${WORKDIR}/sources" |
40 | UNPACKDIR = "${S}" | ||
40 | 41 | ||
41 | do_compile() { | ||
42 | @@ -16,4 +14,4 @@ do_install() { | ||
43 | install -m 0755 helloworld ${D}${bindir} | ||
44 | } | ||
45 | |||
46 | -BBCLASSEXTEND = "native nativesdk" | ||
47 | \ No newline at end of file | ||
48 | +BBCLASSEXTEND = "native nativesdk" | ||
49 | -- | 42 | -- |
50 | 2.41.0 | 43 | 2.45.1 |
51 | 44 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail index 2d2b4e683d..1087843619 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail | |||
@@ -1,46 +1,42 @@ | |||
1 | From e29da5faa74409be394caa09d9f3b7b60f8592b9 Mon Sep 17 00:00:00 2001 | 1 | From f4b72cc24f5e2a290a8637775c4d41c16d5d83aa Mon Sep 17 00:00:00 2001 |
2 | From: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Thu, 23 Feb 2017 10:34:27 -0600 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] meta: adding hello-yocto recipe | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This is a sample recipe | 6 | This should fail the test_summary_presence test. |
7 | 7 | ||
8 | Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | meta/recipes-devtools/hello-world/hello-world/hello_world.c | 5 +++++ | 10 | .../selftest-hello-extra_1.0.bb | 20 +++++++++++++++++++ |
11 | meta/recipes-devtools/hello-world/hello-world_1.0.bb | 12 ++++++++++++ | 11 | 1 file changed, 20 insertions(+) |
12 | 2 files changed, 17 insertions(+) | 12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
13 | create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c | ||
14 | create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
15 | 13 | ||
16 | diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
17 | new file mode 100644 | 15 | new file mode 100644 |
18 | index 0000000000..0d59f57d4c | 16 | index 00000000000..2dc352d479e |
19 | --- /dev/null | 17 | --- /dev/null |
20 | +++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
21 | @@ -0,0 +1,5 @@ | 19 | @@ -0,0 +1,20 @@ |
22 | +#include <stdio.h> | 20 | +DESCRIPTION = "Simple helloworld application -- selftest variant" |
21 | +SECTION = "examples" | ||
22 | +LICENSE = "MIT" | ||
23 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
23 | + | 24 | + |
24 | +int main(){ | 25 | +SRC_URI = "file://helloworld.c" |
25 | + printf("Hello World\n"); | ||
26 | +} | ||
27 | diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
28 | new file mode 100644 | ||
29 | index 0000000000..c4e1359217 | ||
30 | --- /dev/null | ||
31 | +++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
32 | @@ -0,0 +1,12 @@ | ||
33 | +LICENSE = "CLOSED" | ||
34 | + | 26 | + |
35 | +SRC_URI += "file://hello_world.c" | 27 | +S = "${WORKDIR}/sources" |
28 | +UNPACKDIR = "${S}" | ||
36 | + | 29 | + |
37 | +do_compile(){ | 30 | +do_compile() { |
38 | + ${CC} -o hello_world ../hello_world.c | 31 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld |
39 | +} | 32 | +} |
40 | + | 33 | + |
41 | +do_install(){ | 34 | +do_install() { |
42 | + install -d ${D}${bindir} | 35 | + install -d ${D}${bindir} |
43 | + install -m +x hello_world ${D}${bindir}/hello_world | 36 | + install -m 0755 helloworld ${D}${bindir} |
44 | +} | 37 | +} |
45 | -- | 38 | + |
46 | 2.11.0 | 39 | +BBCLASSEXTEND = "native nativesdk" |
40 | -- | ||
41 | 2.45.1 | ||
42 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass index 55f0309b3f..3d35a8d8fb 100644 --- a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass +++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass | |||
@@ -1,49 +1,43 @@ | |||
1 | From 0cd2fed12ce4b7b071edde12aec4481ad7a6f107 Mon Sep 17 00:00:00 2001 | 1 | From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001 |
2 | From: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Thu, 23 Feb 2017 10:34:27 -0600 | 3 | Date: Fri, 31 May 2024 11:03:47 -0400 |
4 | Subject: [PATCH] meta: adding hello-yocto recipe | 4 | Subject: [PATCH] selftest-hello: add selftest-hello-extra |
5 | 5 | ||
6 | This is a sample recipe | 6 | This should pass the test_summary_presence test. |
7 | 7 | ||
8 | Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 9 | --- |
10 | .../hello-world/hello-world/hello_world.c | 5 +++++ | 10 | .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++ |
11 | meta/recipes-devtools/hello-world/hello-world_1.0.bb | 15 +++++++++++++++ | 11 | 1 file changed, 21 insertions(+) |
12 | 2 files changed, 20 insertions(+) | 12 | create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
13 | create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c | ||
14 | create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
15 | 13 | ||
16 | diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 14 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
17 | new file mode 100644 | 15 | new file mode 100644 |
18 | index 0000000000..0d59f57d4c | 16 | index 00000000000..f3dec1b220c |
19 | --- /dev/null | 17 | --- /dev/null |
20 | +++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c | 18 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb |
21 | @@ -0,0 +1,5 @@ | 19 | @@ -0,0 +1,21 @@ |
22 | +#include <stdio.h> | 20 | +SUMMARY = "This is an example summary" |
21 | +DESCRIPTION = "Simple helloworld application -- selftest variant" | ||
22 | +SECTION = "examples" | ||
23 | +LICENSE = "MIT" | ||
24 | +LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | ||
23 | + | 25 | + |
24 | +int main(){ | 26 | +SRC_URI = "file://helloworld.c" |
25 | + printf("Hello World\n"); | ||
26 | +} | ||
27 | diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
28 | new file mode 100644 | ||
29 | index 0000000000..c54283eece | ||
30 | --- /dev/null | ||
31 | +++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb | ||
32 | @@ -0,0 +1,15 @@ | ||
33 | +SUMMARY = "This is a sample summary" | ||
34 | +DESCRIPTION = "This is a sample description" | ||
35 | +HOMEPAGE = "https://sample.com/this-is-a-sample" | ||
36 | +LICENSE = "CLOSED" | ||
37 | + | 27 | + |
38 | +SRC_URI += "file://hello_world.c" | 28 | +S = "${WORKDIR}/sources" |
29 | +UNPACKDIR = "${S}" | ||
39 | + | 30 | + |
40 | +do_compile(){ | 31 | +do_compile() { |
41 | + ${CC} -o hello_world ../hello_world.c | 32 | + ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld |
42 | +} | 33 | +} |
43 | + | 34 | + |
44 | +do_install(){ | 35 | +do_install() { |
45 | + install -d ${D}${bindir} | 36 | + install -d ${D}${bindir} |
46 | + install -m +x hello_world ${D}${bindir}/hello_world | 37 | + install -m 0755 helloworld ${D}${bindir} |
47 | +} | 38 | +} |
48 | -- | 39 | + |
49 | 2.11.0 | 40 | +BBCLASSEXTEND = "native nativesdk" |
41 | -- | ||
42 | 2.45.1 | ||
43 | |||
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail index c763a7506e..f64f2a40b0 100644 --- a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail +++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail | |||
@@ -1,29 +1,31 @@ | |||
1 | From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | CVE: CVE-BAD-FORMAT | 6 | This should fail the test_cve_tag_format test. |
7 | |||
8 | CVE: CVE-1234-56789 | ||
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..9219b8db62 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
24 | +Subject: [PATCH] Fix CVE-NOT-REAL | 26 | +Subject: [PATCH] Fix CVE-NOT-REAL |
25 | + | 27 | + |
26 | +CVE: CVE-BAD-FORMAT | 28 | +CVE: CVE-BAD_FORMAT |
27 | +Upstream-Status: Backport(http://example.com/example) | 29 | +Upstream-Status: Backport(http://example.com/example) |
28 | + | 30 | + |
29 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 31 | +Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | ||
39 | + | ||
40 | + printf("%d\n", str_len(string1)); | ||
41 | + printf("%d\n", str_len(string2)); | ||
42 | ++ printf("CVE FIXED!!!\n"); | ||
43 | + | ||
44 | + return 0; | ||
45 | + } | ||
46 | +-- | ||
47 | +2.41.0 | ||
48 | + | 40 | + |
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
56 | 56 | ||
57 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
59 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
60 | + " | 60 | + " |
61 | |||
62 | S = "${WORKDIR}" | ||
63 | |||
64 | @@ -16,4 +18,4 @@ do_install() { | ||
65 | install -m 0755 helloworld ${D}${bindir} | ||
66 | } | ||
67 | 61 | ||
68 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
69 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
70 | +BBCLASSEXTEND = "native nativesdk" | ||
71 | -- | 64 | -- |
72 | 2.41.0 | 65 | 2.45.1 |
73 | 66 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass index ef6017037c..3819487041 100644 --- a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass +++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass | |||
@@ -1,23 +1,25 @@ | |||
1 | From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should pass the test_cve_tag_format test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..9219b8db62 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | ||
39 | + | ||
40 | + printf("%d\n", str_len(string1)); | ||
41 | + printf("%d\n", str_len(string2)); | ||
42 | ++ printf("CVE FIXED!!!\n"); | ||
43 | + | ||
44 | + return 0; | ||
45 | + } | ||
46 | +-- | ||
47 | +2.41.0 | ||
48 | + | 40 | + |
41 | +int main() { | ||
42 | + | ||
43 | + printf("%d\n", str_len(string1)); | ||
44 | + printf("%d\n", str_len(string2)); | ||
45 | + printf("CVE FIXED!!!\n"); | ||
46 | + | ||
47 | + return 0; | ||
48 | +} | ||
49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
53 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
56 | 56 | ||
57 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
58 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
59 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
60 | + " | 60 | + " |
61 | |||
62 | S = "${WORKDIR}" | ||
63 | |||
64 | @@ -16,4 +18,4 @@ do_install() { | ||
65 | install -m 0755 helloworld ${D}${bindir} | ||
66 | } | ||
67 | 61 | ||
68 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
69 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
70 | +BBCLASSEXTEND = "native nativesdk" | ||
71 | -- | 64 | -- |
72 | 2.41.0 | 65 | 2.45.1 |
73 | 66 | ||
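The two fixture updates above exercise test_cve_tag_format from both sides: the .fail mbox embeds a malformed "CVE: CVE-BAD_FORMAT" tag in the newly added patch file, while the .pass variant carries only the well-formed "CVE: CVE-1234-56789". A minimal sketch of the kind of check involved, using a plain regular expression as a stand-in for the pyparsing grammar that now lives in patchtest_patterns (assumed names, not the real implementation):

    import re

    # Hedged stand-in for the CVE tag pattern; the real grammar is built
    # with pyparsing in patchtest_patterns, not with re.
    CVE_TAG = re.compile(r"^\+?CVE:(\s+CVE-\d{4}-\d+)+$")

    def cve_tags_well_formed(payload):
        """True if every CVE: line in a patch payload is well formed."""
        return all(
            CVE_TAG.match(line)
            for line in payload.splitlines()
            if line.lstrip("+").startswith("CVE:")
        )

    assert not cve_tags_well_formed("+CVE: CVE-BAD_FORMAT")  # .fail fixture
    assert cve_tags_well_formed("CVE: CVE-1234-56789")       # .pass fixture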
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail index ce8bf7b7d1..b2d0fab9e3 100644 --- a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail +++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail | |||
@@ -1,23 +1,25 @@ | |||
1 | From 5a2d0ac780a0f4c046fb1a3c3463d3e726f191cb Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should fail the test_signed_off_by_presence test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../selftest-hello/files/CVE-1234-56789.patch | 26 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 25 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 30 insertions(+), 2 deletions(-) | 14 | 2 files changed, 28 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..92a5b65a53 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,26 @@ | 22 | @@ -0,0 +1,25 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -34,18 +36,17 @@ index 0000000000..92a5b65a53 | |||
34 | +index 1788f38..83d7918 100644 | 36 | +index 1788f38..83d7918 100644 |
35 | +--- a/strlen.c | 37 | +--- a/strlen.c |
36 | ++++ b/strlen.c | 38 | ++++ b/strlen.c |
37 | +@@ -8,6 +8,7 @@ int main() { | 39 | + |
38 | + | 40 | +int main() { |
39 | + printf("%d\n", str_len(string1)); | 41 | + |
40 | + printf("%d\n", str_len(string2)); | 42 | + printf("%d\n", str_len(string1)); |
41 | ++ printf("CVE FIXED!!!\n"); | 43 | + printf("%d\n", str_len(string2)); |
42 | + | 44 | + printf("CVE FIXED!!!\n"); |
43 | + return 0; | 45 | + |
44 | + } | 46 | + return 0; |
45 | +-- | 47 | +} |
46 | +2.41.0 | ||
47 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 48 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
48 | index 547587bef4..76975a6729 100644 | 49 | index 2dc352d479e..d937759f157 100644 |
49 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 50 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
50 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
51 | @@ -3,7 +3,9 @@ SECTION = "examples" | 52 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -54,18 +55,11 @@ index 547587bef4..76975a6729 100644 | |||
54 | 55 | ||
55 | -SRC_URI = "file://helloworld.c" | 56 | -SRC_URI = "file://helloworld.c" |
56 | +SRC_URI = "file://helloworld.c \ | 57 | +SRC_URI = "file://helloworld.c \ |
57 | + file://CVE-1234-56789.patch \ | 58 | + file://0001-Fix-CVE-1234-56789.patch \ |
58 | + " | 59 | + " |
59 | |||
60 | S = "${WORKDIR}" | ||
61 | |||
62 | @@ -16,4 +18,4 @@ do_install() { | ||
63 | install -m 0755 helloworld ${D}${bindir} | ||
64 | } | ||
65 | 60 | ||
66 | -BBCLASSEXTEND = "native nativesdk" | 61 | S = "${WORKDIR}/sources" |
67 | \ No newline at end of file | 62 | UNPACKDIR = "${S}" |
68 | +BBCLASSEXTEND = "native nativesdk" | ||
69 | -- | 63 | -- |
70 | 2.41.0 | 64 | 2.45.1 |
71 | 65 | ||
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass index ea34c76f0d..2661c1416f 100644 --- a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass +++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass | |||
@@ -1,23 +1,25 @@ | |||
1 | From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 | 1 | From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001 |
2 | From: Trevor Gamblin <tgamblin@baylibre.com> | 2 | From: Trevor Gamblin <tgamblin@baylibre.com> |
3 | Date: Tue, 29 Aug 2023 14:12:27 -0400 | 3 | Date: Fri, 31 May 2024 09:54:50 -0400 |
4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 | 4 | Subject: [PATCH] selftest-hello: fix CVE-1234-56789 |
5 | 5 | ||
6 | This should pass the test_signed_off_by_presence test. | ||
7 | |||
6 | CVE: CVE-1234-56789 | 8 | CVE: CVE-1234-56789 |
7 | 9 | ||
8 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
9 | --- | 11 | --- |
10 | .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ | 12 | .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++ |
11 | .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- | 13 | .../selftest-hello/selftest-hello_1.0.bb | 4 ++- |
12 | 2 files changed, 31 insertions(+), 2 deletions(-) | 14 | 2 files changed, 29 insertions(+), 1 deletion(-) |
13 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 15 | create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
14 | 16 | ||
15 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 17 | diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
16 | new file mode 100644 | 18 | new file mode 100644 |
17 | index 0000000000..869cfb6fe5 | 19 | index 00000000000..8a4f9329303 |
18 | --- /dev/null | 20 | --- /dev/null |
19 | +++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch | 21 | +++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch |
20 | @@ -0,0 +1,27 @@ | 22 | @@ -0,0 +1,26 @@ |
21 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 | 23 | +From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 |
22 | +From: Trevor Gamblin <tgamblin@baylibre.com> | 24 | +From: Trevor Gamblin <tgamblin@baylibre.com> |
23 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 | 25 | +Date: Tue, 29 Aug 2023 14:08:20 -0400 |
@@ -35,18 +37,17 @@ index 0000000000..869cfb6fe5 | |||
35 | +index 1788f38..83d7918 100644 | 37 | +index 1788f38..83d7918 100644 |
36 | +--- a/strlen.c | 38 | +--- a/strlen.c |
37 | ++++ b/strlen.c | 39 | ++++ b/strlen.c |
38 | +@@ -8,6 +8,7 @@ int main() { | 40 | + |
39 | + | 41 | +int main() { |
40 | + printf("%d\n", str_len(string1)); | 42 | + |
41 | + printf("%d\n", str_len(string2)); | 43 | + printf("%d\n", str_len(string1)); |
42 | ++ printf("CVE FIXED!!!\n"); | 44 | + printf("%d\n", str_len(string2)); |
43 | + | 45 | + printf("CVE FIXED!!!\n"); |
44 | + return 0; | 46 | + |
45 | + } | 47 | + return 0; |
46 | +-- | 48 | +} |
47 | +2.41.0 | ||
48 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 49 | diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
49 | index 547587bef4..76975a6729 100644 | 50 | index 2dc352d479e..d937759f157 100644 |
50 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 51 | --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
51 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 52 | +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb |
52 | @@ -3,7 +3,9 @@ SECTION = "examples" | 53 | @@ -3,7 +3,9 @@ SECTION = "examples" |
@@ -55,18 +56,11 @@ index 547587bef4..76975a6729 100644 | |||
55 | 56 | ||
56 | -SRC_URI = "file://helloworld.c" | 57 | -SRC_URI = "file://helloworld.c" |
57 | +SRC_URI = "file://helloworld.c \ | 58 | +SRC_URI = "file://helloworld.c \ |
58 | + file://CVE-1234-56789.patch \ | 59 | + file://0001-Fix-CVE-1234-56789.patch \ |
59 | + " | 60 | + " |
60 | |||
61 | S = "${WORKDIR}" | ||
62 | |||
63 | @@ -16,4 +18,4 @@ do_install() { | ||
64 | install -m 0755 helloworld ${D}${bindir} | ||
65 | } | ||
66 | 61 | ||
67 | -BBCLASSEXTEND = "native nativesdk" | 62 | S = "${WORKDIR}/sources" |
68 | \ No newline at end of file | 63 | UNPACKDIR = "${S}" |
69 | +BBCLASSEXTEND = "native nativesdk" | ||
70 | -- | 64 | -- |
71 | 2.41.0 | 65 | 2.45.1 |
72 | 66 | ||
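As with the CVE pair, these two fixtures differ only in whether the tag that test_signed_off_by_presence looks for appears where expected. A rough sketch of the shape of that check, assuming a regex approximation of the pyparsing definition that used to live in parse_signed_off_by.py (removed further down in this series):

    import re

    # Hedged approximation of the signed_off_by grammar: a name followed
    # by an <email> in angle brackets, anchored at the start of a line.
    SOB = re.compile(r"^\+?Signed-off-by:\s+\S[^<]*<[^@>\s]+@[^>\s]+>",
                     re.MULTILINE)

    def has_signoff(payload):
        return SOB.search(payload) is not None

    assert has_signoff("Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>")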
diff --git a/meta/lib/patchtest/selftest/selftest b/meta/lib/patchtest/selftest/selftest index 6fad50ce61..3cf1c361f7 100755 --- a/meta/lib/patchtest/selftest/selftest +++ b/meta/lib/patchtest/selftest/selftest | |||
@@ -38,7 +38,7 @@ def test(root, patch): | |||
38 | res = True | 38 | res = True |
39 | patchpath = os.path.abspath(os.path.join(root, patch)) | 39 | patchpath = os.path.abspath(os.path.join(root, patch)) |
40 | 40 | ||
41 | cmd = 'patchtest --repodir %s --testdir %s/tests --patch %s' % (repodir, topdir, patchpath) | 41 | cmd = 'patchtest --base-commit HEAD --repodir %s --testdir %s/tests --patch %s' % (repodir, topdir, patchpath) |
42 | results = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True, shell=True) | 42 | results = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True, shell=True) |
43 | 43 | ||
44 | return results | 44 | return results |
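The selftest now pins the comparison point with --base-commit HEAD, so patchtest evaluates each fixture against the current tip of the test repository rather than a default base. Roughly, each fixture is driven as below; the sketch mirrors the cmd string in the hunk above, with error handling omitted:

    import os
    import subprocess

    def run_patchtest(repodir, topdir, patchpath):
        # Mirrors the cmd string in test(): stderr is folded into stdout
        # so the PASS/FAIL/SKIP summary can be scraped from one stream.
        cmd = "patchtest --base-commit HEAD --repodir %s --testdir %s/tests --patch %s" % (
            repodir, topdir, os.path.abspath(patchpath))
        return subprocess.check_output(cmd, stderr=subprocess.STDOUT,
                                       universal_newlines=True, shell=True)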
diff --git a/meta/lib/patchtest/tests/base.py b/meta/lib/patchtest/tests/base.py index 424e61b5be..919ca136bb 100644 --- a/meta/lib/patchtest/tests/base.py +++ b/meta/lib/patchtest/tests/base.py | |||
@@ -8,20 +8,23 @@ import unittest | |||
8 | import logging | 8 | import logging |
9 | import json | 9 | import json |
10 | import unidiff | 10 | import unidiff |
11 | from data import PatchTestInput | 11 | from patchtest_parser import PatchtestParser |
12 | import mailbox | 12 | import mailbox |
13 | import patchtest_patterns | ||
13 | import collections | 14 | import collections |
14 | import sys | 15 | import sys |
15 | import os | 16 | import os |
16 | import re | 17 | import re |
17 | 18 | ||
18 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'pyparsing')) | 19 | logger = logging.getLogger("patchtest") |
20 | debug = logger.debug | ||
21 | info = logger.info | ||
22 | warn = logger.warn | ||
23 | error = logger.error | ||
19 | 24 | ||
20 | logger = logging.getLogger('patchtest') | 25 | Commit = collections.namedtuple( |
21 | debug=logger.debug | 26 | "Commit", ["author", "subject", "commit_message", "shortlog", "payload"] |
22 | info=logger.info | 27 | ) |
23 | warn=logger.warn | ||
24 | error=logger.error | ||
25 | 28 | ||
26 | Commit = collections.namedtuple('Commit', ['author', 'subject', 'commit_message', 'shortlog', 'payload']) | 29 | Commit = collections.namedtuple('Commit', ['author', 'subject', 'commit_message', 'shortlog', 'payload']) |
27 | 30 | ||
@@ -34,10 +37,6 @@ class PatchtestOEError(Exception): | |||
34 | class Base(unittest.TestCase): | 37 | class Base(unittest.TestCase): |
35 | # if unit test fails, fail message will throw at least the following JSON: {"id": <testid>} | 38 | # if unit test fails, fail message will throw at least the following JSON: {"id": <testid>} |
36 | 39 | ||
37 | endcommit_messages_regex = re.compile(r'\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n') | ||
38 | patchmetadata_regex = re.compile(r'-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+') | ||
39 | |||
40 | |||
41 | @staticmethod | 40 | @staticmethod |
42 | def msg_to_commit(msg): | 41 | def msg_to_commit(msg): |
43 | payload = msg.get_payload() | 42 | payload = msg.get_payload() |
@@ -50,7 +49,7 @@ class Base(unittest.TestCase): | |||
50 | @staticmethod | 49 | @staticmethod |
51 | def commit_message(payload): | 50 | def commit_message(payload): |
52 | commit_message = payload.__str__() | 51 | commit_message = payload.__str__() |
53 | match = Base.endcommit_messages_regex.search(payload) | 52 | match = patchtest_patterns.endcommit_messages_regex.search(payload) |
54 | if match: | 53 | if match: |
55 | commit_message = payload[:match.start()] | 54 | commit_message = payload[:match.start()] |
56 | return commit_message | 55 | return commit_message |
@@ -66,13 +65,15 @@ class Base(unittest.TestCase): | |||
66 | def setUpClass(cls): | 65 | def setUpClass(cls): |
67 | 66 | ||
68 | # General objects: mailbox.mbox and patchset | 67 | # General objects: mailbox.mbox and patchset |
69 | cls.mbox = mailbox.mbox(PatchTestInput.repo.patch) | 68 | cls.mbox = mailbox.mbox(PatchtestParser.repo.patch.path) |
70 | 69 | ||
71 | # Patch may be malformed, so try parsing it | 70 | # Patch may be malformed, so try parsing it |
72 | cls.unidiff_parse_error = '' | 71 | cls.unidiff_parse_error = '' |
73 | cls.patchset = None | 72 | cls.patchset = None |
74 | try: | 73 | try: |
75 | cls.patchset = unidiff.PatchSet.from_filename(PatchTestInput.repo.patch, encoding=u'UTF-8') | 74 | cls.patchset = unidiff.PatchSet.from_filename( |
75 | PatchtestParser.repo.patch.path, encoding="UTF-8" | ||
76 | ) | ||
76 | except unidiff.UnidiffParseError as upe: | 77 | except unidiff.UnidiffParseError as upe: |
77 | cls.patchset = [] | 78 | cls.patchset = [] |
78 | cls.unidiff_parse_error = str(upe) | 79 | cls.unidiff_parse_error = str(upe) |
@@ -149,7 +150,7 @@ class Metadata(Base): | |||
149 | 150 | ||
150 | # import relevant libraries | 151 | # import relevant libraries |
151 | try: | 152 | try: |
152 | scripts_path = os.path.join(PatchTestInput.repodir, 'scripts', 'lib') | 153 | scripts_path = os.path.join(PatchtestParser.repodir, "scripts", "lib") |
153 | if scripts_path not in sys.path: | 154 | if scripts_path not in sys.path: |
154 | sys.path.insert(0, scripts_path) | 155 | sys.path.insert(0, scripts_path) |
155 | import scriptpath | 156 | import scriptpath |
@@ -224,11 +225,23 @@ class Metadata(Base): | |||
224 | for patch in patchset: | 225 | for patch in patchset: |
225 | if patch.path.endswith('.bb') or patch.path.endswith('.bbappend') or patch.path.endswith('.inc'): | 226 | if patch.path.endswith('.bb') or patch.path.endswith('.bbappend') or patch.path.endswith('.inc'): |
226 | if patch.is_added_file: | 227 | if patch.is_added_file: |
227 | added_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path)) | 228 | added_paths.append( |
229 | os.path.join( | ||
230 | os.path.abspath(PatchtestParser.repodir), patch.path | ||
231 | ) | ||
232 | ) | ||
228 | elif patch.is_modified_file: | 233 | elif patch.is_modified_file: |
229 | modified_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path)) | 234 | modified_paths.append( |
235 | os.path.join( | ||
236 | os.path.abspath(PatchtestParser.repodir), patch.path | ||
237 | ) | ||
238 | ) | ||
230 | elif patch.is_removed_file: | 239 | elif patch.is_removed_file: |
231 | removed_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path)) | 240 | removed_paths.append( |
241 | os.path.join( | ||
242 | os.path.abspath(PatchtestParser.repodir), patch.path | ||
243 | ) | ||
244 | ) | ||
232 | 245 | ||
233 | data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items() | 246 | data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items() |
234 | 247 | ||
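base.py drops its module-local regexes and the old PatchTestInput entry point in favour of the shared patchtest_patterns module and the PatchtestParser rename, with the patch path now reached through repo.patch.path. Note that the reformatted Commit namedtuple is added while the original one-line assignment survives at (new) line 29, leaving a harmless duplicate definition. The record itself is unchanged; a hypothetical instance for illustration only:

    import collections

    # The record each test iterates over, as defined in the hunk above.
    Commit = collections.namedtuple(
        "Commit", ["author", "subject", "commit_message", "shortlog", "payload"]
    )

    # Illustrative values, not taken from a real mbox:
    c = Commit(
        author="Trevor Gamblin <tgamblin@baylibre.com>",
        subject="[PATCH] selftest-hello: fix CVE-1234-56789",
        commit_message="CVE: CVE-1234-56789",
        shortlog="selftest-hello: fix CVE-1234-56789",
        payload="...",
    )
    assert c.shortlog.split(":")[0] == "selftest-hello"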
diff --git a/meta/lib/patchtest/tests/pyparsing/common.py b/meta/lib/patchtest/tests/pyparsing/common.py deleted file mode 100644 index cbce4c38bc..0000000000 --- a/meta/lib/patchtest/tests/pyparsing/common.py +++ /dev/null | |||
@@ -1,26 +0,0 @@ | |||
1 | # common pyparsing variables | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import pyparsing | ||
8 | |||
9 | # general | ||
10 | colon = pyparsing.Literal(":") | ||
11 | start = pyparsing.LineStart() | ||
12 | end = pyparsing.LineEnd() | ||
13 | at = pyparsing.Literal("@") | ||
14 | lessthan = pyparsing.Literal("<") | ||
15 | greaterthan = pyparsing.Literal(">") | ||
16 | opensquare = pyparsing.Literal("[") | ||
17 | closesquare = pyparsing.Literal("]") | ||
18 | inappropriate = pyparsing.CaselessLiteral("Inappropriate") | ||
19 | submitted = pyparsing.CaselessLiteral("Submitted") | ||
20 | |||
21 | # word related | ||
22 | nestexpr = pyparsing.nestedExpr(opener='[', closer=']') | ||
23 | inappropriateinfo = pyparsing.Literal("Inappropriate") + nestexpr | ||
24 | submittedinfo = pyparsing.Literal("Submitted") + nestexpr | ||
25 | word = pyparsing.Word(pyparsing.alphas) | ||
26 | worddot = pyparsing.Word(pyparsing.alphas+".") | ||
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py deleted file mode 100644 index f7fb82ec2b..0000000000 --- a/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py +++ /dev/null | |||
@@ -1,18 +0,0 @@ | |||
1 | # signed-off-by pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | |||
8 | import pyparsing | ||
9 | import common | ||
10 | |||
11 | name = pyparsing.Regex('\S+.*(?= <)') | ||
12 | username = pyparsing.OneOrMore(common.worddot) | ||
13 | domain = pyparsing.OneOrMore(common.worddot) | ||
14 | cve = pyparsing.Regex('CVE\-\d{4}\-\d+') | ||
15 | cve_mark = pyparsing.Literal("CVE:") | ||
16 | |||
17 | cve_tag = pyparsing.AtLineStart(cve_mark + cve) | ||
18 | patch_cve_tag = pyparsing.AtLineStart("+" + cve_mark + cve) | ||
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py deleted file mode 100644 index 30d3ab35b3..0000000000 --- a/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py +++ /dev/null | |||
@@ -1,14 +0,0 @@ | |||
1 | # subject pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | # NOTE:This is an oversimplified syntax of the mbox's summary | ||
8 | |||
9 | import pyparsing | ||
10 | import common | ||
11 | |||
12 | target = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables.replace(':',''))) | ||
13 | summary = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables)) | ||
14 | shortlog = common.start + target + common.colon + summary + common.end | ||
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py deleted file mode 100644 index 692ebec3ff..0000000000 --- a/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py +++ /dev/null | |||
@@ -1,22 +0,0 @@ | |||
1 | # signed-off-by pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | |||
8 | import pyparsing | ||
9 | import common | ||
10 | |||
11 | name = pyparsing.Regex('\S+.*(?= <)') | ||
12 | username = pyparsing.OneOrMore(common.worddot) | ||
13 | domain = pyparsing.OneOrMore(common.worddot) | ||
14 | |||
15 | # taken from https://pyparsing-public.wikispaces.com/Helpful+Expressions | ||
16 | email = pyparsing.Regex(r"(?P<user>[A-Za-z0-9._%+-]+)@(?P<hostname>[A-Za-z0-9.-]+)\.(?P<domain>[A-Za-z]{2,})") | ||
17 | |||
18 | email_enclosed = common.lessthan + email + common.greaterthan | ||
19 | |||
20 | signed_off_by_mark = pyparsing.Literal("Signed-off-by:") | ||
21 | signed_off_by = pyparsing.AtLineStart(signed_off_by_mark + name + email_enclosed) | ||
22 | patch_signed_off_by = pyparsing.AtLineStart("+" + signed_off_by_mark + name + email_enclosed) | ||
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py deleted file mode 100644 index bc6c427c4c..0000000000 --- a/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py +++ /dev/null | |||
@@ -1,24 +0,0 @@ | |||
1 | # upstream-status pyparsing definition | ||
2 | # | ||
3 | # Copyright (C) 2016 Intel Corporation | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | |||
8 | import common | ||
9 | import pyparsing | ||
10 | |||
11 | upstream_status_literal_valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted"] | ||
12 | upstream_status_nonliteral_valid_status = ["Pending", "Backport", "Denied", "Inappropriate [reason]", "Submitted [where]"] | ||
13 | |||
14 | upstream_status_valid_status = pyparsing.Or( | ||
15 | [pyparsing.Literal(status) for status in upstream_status_literal_valid_status] | ||
16 | ) | ||
17 | |||
18 | upstream_status_mark = pyparsing.Literal("Upstream-Status") | ||
19 | inappropriate_status_mark = common.inappropriate | ||
20 | submitted_status_mark = common.submitted | ||
21 | |||
22 | upstream_status = common.start + upstream_status_mark + common.colon + upstream_status_valid_status | ||
23 | upstream_status_inappropriate_info = common.start + upstream_status_mark + common.colon + common.inappropriateinfo | ||
24 | upstream_status_submitted_info = common.start + upstream_status_mark + common.colon + common.submittedinfo | ||
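All five helper modules above (common.py, parse_cve_tags.py, parse_shortlog.py, parse_signed_off_by.py, parse_upstream_status.py) disappear in favour of a single patchtest_patterns module. A hedged reconstruction of the Upstream-Status grammar they carried, assembled from the same pyparsing pieces as the removed parse_upstream_status.py:

    import pyparsing

    valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted"]

    # LineStart + "Upstream-Status" + ":" + one of the literal statuses,
    # mirroring the removed upstream_status definition.
    upstream_status = (
        pyparsing.LineStart()
        + pyparsing.Literal("Upstream-Status")
        + pyparsing.Literal(":")
        + pyparsing.Or([pyparsing.Literal(s) for s in valid_status])
    )

    upstream_status.parseString("Upstream-Status: Backport")   # parses
    # "Upstream-Status: Accepted" would raise pyparsing.ParseException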
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py index 0b623b7d17..dab733ea77 100644 --- a/meta/lib/patchtest/tests/test_mbox.py +++ b/meta/lib/patchtest/tests/test_mbox.py | |||
@@ -6,15 +6,15 @@ | |||
6 | 6 | ||
7 | import base | 7 | import base |
8 | import collections | 8 | import collections |
9 | import parse_shortlog | 9 | import patchtest_patterns |
10 | import parse_signed_off_by | ||
11 | import pyparsing | 10 | import pyparsing |
11 | import re | ||
12 | import subprocess | 12 | import subprocess |
13 | from data import PatchTestInput | 13 | from patchtest_parser import PatchtestParser |
14 | 14 | ||
15 | def headlog(): | 15 | def headlog(): |
16 | output = subprocess.check_output( | 16 | output = subprocess.check_output( |
17 | "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchTestInput.repodir, | 17 | "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchtestParser.repodir, |
18 | universal_newlines=True, | 18 | universal_newlines=True, |
19 | shell=True | 19 | shell=True |
20 | ) | 20 | ) |
@@ -22,20 +22,6 @@ def headlog(): | |||
22 | 22 | ||
23 | class TestMbox(base.Base): | 23 | class TestMbox(base.Base): |
24 | 24 | ||
25 | auh_email = 'auh@auh.yoctoproject.org' | ||
26 | |||
27 | invalids = [pyparsing.Regex("^Upgrade Helper.+"), | ||
28 | pyparsing.Regex(auh_email), | ||
29 | pyparsing.Regex("uh@not\.set"), | ||
30 | pyparsing.Regex("\S+@example\.com")] | ||
31 | |||
32 | rexp_detect = pyparsing.Regex('\[\s?YOCTO.*\]') | ||
33 | rexp_validation = pyparsing.Regex('\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]') | ||
34 | revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"') | ||
35 | signoff_prog = parse_signed_off_by.signed_off_by | ||
36 | revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"') | ||
37 | maxlength = 90 | ||
38 | |||
39 | # base paths of main yocto project sub-projects | 25 | # base paths of main yocto project sub-projects |
40 | paths = { | 26 | paths = { |
41 | 'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'], | 27 | 'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'], |
@@ -57,16 +43,18 @@ class TestMbox(base.Base): | |||
57 | 43 | ||
58 | 44 | ||
59 | def test_signed_off_by_presence(self): | 45 | def test_signed_off_by_presence(self): |
60 | for commit in TestMbox.commits: | 46 | for commit in self.commits: |
61 | # skip those patches that revert older commits, these do not require the tag presence | 47 | # skip those patches that revert older commits, these do not require the tag presence |
62 | if self.revert_shortlog_regex.search_string(commit.shortlog): | 48 | if patchtest_patterns.mbox_revert_shortlog_regex.search_string(commit.shortlog): |
63 | continue | 49 | continue |
64 | if not self.signoff_prog.search_string(commit.payload): | 50 | if not patchtest_patterns.signed_off_by.search_string(commit.payload): |
65 | self.fail('Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"', | 51 | self.fail( |
66 | commit=commit) | 52 | 'Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"', |
53 | commit=commit, | ||
54 | ) | ||
67 | 55 | ||
68 | def test_shortlog_format(self): | 56 | def test_shortlog_format(self): |
69 | for commit in TestMbox.commits: | 57 | for commit in self.commits: |
70 | shortlog = commit.shortlog | 58 | shortlog = commit.shortlog |
71 | if not shortlog.strip(): | 59 | if not shortlog.strip(): |
72 | self.skip('Empty shortlog, no reason to execute shortlog format test') | 60 | self.skip('Empty shortlog, no reason to execute shortlog format test') |
@@ -75,40 +63,54 @@ class TestMbox(base.Base): | |||
75 | if shortlog.startswith('Revert "'): | 63 | if shortlog.startswith('Revert "'): |
76 | continue | 64 | continue |
77 | try: | 65 | try: |
78 | parse_shortlog.shortlog.parseString(shortlog) | 66 | patchtest_patterns.shortlog.parseString(shortlog) |
79 | except pyparsing.ParseException as pe: | 67 | except pyparsing.ParseException as pe: |
80 | self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"', | 68 | self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"', |
81 | commit=commit) | 69 | commit=commit) |
82 | 70 | ||
83 | def test_shortlog_length(self): | 71 | def test_shortlog_length(self): |
84 | for commit in TestMbox.commits: | 72 | for commit in self.commits: |
85 | # no reason to re-check on revert shortlogs | 73 | # no reason to re-check on revert shortlogs |
86 | shortlog = commit.shortlog | 74 | shortlog = re.sub('^(\[.*?\])+ ', '', commit.shortlog) |
87 | if shortlog.startswith('Revert "'): | 75 | if shortlog.startswith('Revert "'): |
88 | continue | 76 | continue |
89 | l = len(shortlog) | 77 | l = len(shortlog) |
90 | if l > self.maxlength: | 78 | if l > patchtest_patterns.mbox_shortlog_maxlength: |
91 | self.fail('Edit shortlog so that it is %d characters or less (currently %d characters)' % (self.maxlength, l), | 79 | self.fail( |
92 | commit=commit) | 80 | "Edit shortlog so that it is %d characters or less (currently %d characters)" |
81 | % (patchtest_patterns.mbox_shortlog_maxlength, l), | ||
82 | commit=commit, | ||
83 | ) | ||
93 | 84 | ||
94 | def test_series_merge_on_head(self): | 85 | def test_series_merge_on_head(self): |
95 | self.skip("Merge test is disabled for now") | 86 | self.skip("Merge test is disabled for now") |
96 | if PatchTestInput.repo.branch != "master": | 87 | if PatchtestParser.repo.patch.branch != "master": |
97 | self.skip("Skipping merge test since patch is not intended for master branch. Target detected is %s" % PatchTestInput.repo.branch) | 88 | self.skip( |
98 | if not PatchTestInput.repo.ismerged: | 89 | "Skipping merge test since patch is not intended" |
90 | " for master branch. Target detected is %s" | ||
91 | % PatchtestParser.repo.patch.branch | ||
92 | ) | ||
93 | if not PatchtestParser.repo.canbemerged: | ||
99 | commithash, author, date, shortlog = headlog() | 94 | commithash, author, date, shortlog = headlog() |
100 | self.fail('Series does not apply on top of target branch %s' % PatchTestInput.repo.branch, | 95 | self.fail( |
101 | data=[('Targeted branch', '%s (currently at %s)' % (PatchTestInput.repo.branch, commithash))]) | 96 | "Series does not apply on top of target branch %s" |
97 | % PatchtestParser.repo.patch.branch, | ||
98 | data=[ | ||
99 | ( | ||
100 | "Targeted branch", | ||
101 | "%s (currently at %s)" | ||
102 | % (PatchtestParser.repo.patch.branch, commithash), | ||
103 | ) | ||
104 | ], | ||
105 | ) | ||
102 | 106 | ||
103 | def test_target_mailing_list(self): | 107 | def test_target_mailing_list(self): |
104 | """In case of merge failure, check for other targeted projects""" | 108 | """Check for other targeted projects""" |
105 | if PatchTestInput.repo.ismerged: | ||
106 | self.skip('Series merged, no reason to check other mailing lists') | ||
107 | 109 | ||
108 | # a meta project may be indicated in the message subject, if this is the case, just fail | 110 | # a meta project may be indicated in the message subject, if this is the case, just fail |
109 | # TODO: there may be other projects with no meta- prefix, we also need to detect these | 111 | # TODO: there may be other projects with no meta- prefix, we also need to detect these |
110 | project_regex = pyparsing.Regex("\[(?P<project>meta-.+)\]") | 112 | project_regex = pyparsing.Regex("\[(?P<project>meta-.+)\]") |
111 | for commit in TestMbox.commits: | 113 | for commit in self.commits: |
112 | match = project_regex.search_string(commit.subject) | 114 | match = project_regex.search_string(commit.subject) |
113 | if match: | 115 | if match: |
114 | self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists', | 116 | self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists', |
@@ -136,24 +138,42 @@ class TestMbox(base.Base): | |||
136 | data=[('Diff line',self.unidiff_parse_error)]) | 138 | data=[('Diff line',self.unidiff_parse_error)]) |
137 | 139 | ||
138 | def test_commit_message_presence(self): | 140 | def test_commit_message_presence(self): |
139 | for commit in TestMbox.commits: | 141 | for commit in self.commits: |
140 | if not commit.commit_message.strip(): | 142 | if not commit.commit_message.strip(): |
141 | self.fail('Please include a commit message on your patch explaining the change', commit=commit) | 143 | self.fail('Please include a commit message on your patch explaining the change', commit=commit) |
142 | 144 | ||
145 | # This may incorrectly report a failure if something such as a | ||
146 | # Python decorator is included in the commit message, but this | ||
147 | # scenario is much less common than the username case it is written | ||
148 | # to protect against | ||
149 | def test_commit_message_user_tags(self): | ||
150 | for commit in self.commits: | ||
151 | if patchtest_patterns.mbox_github_username.search_string(commit.commit_message): | ||
152 | self.fail('Mbox includes one or more GitHub-style username tags. Ensure that any "@" symbols are stripped out of usernames', commit=commit) | ||
153 | |||
143 | def test_bugzilla_entry_format(self): | 154 | def test_bugzilla_entry_format(self): |
144 | for commit in TestMbox.commits: | 155 | for commit in self.commits: |
145 | if not self.rexp_detect.search_string(commit.commit_message): | 156 | if not patchtest_patterns.mbox_bugzilla.search_string(commit.commit_message): |
146 | self.skip("No bug ID found") | 157 | self.skip("No bug ID found") |
147 | elif not self.rexp_validation.search_string(commit.commit_message): | 158 | elif not patchtest_patterns.mbox_bugzilla_validation.search_string( |
148 | self.fail('Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"', commit=commit) | 159 | commit.commit_message |
160 | ): | ||
161 | self.fail( | ||
162 | 'Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"', | ||
163 | commit=commit, | ||
164 | ) | ||
149 | 165 | ||
150 | def test_author_valid(self): | 166 | def test_author_valid(self): |
151 | for commit in self.commits: | 167 | for commit in self.commits: |
152 | for invalid in self.invalids: | 168 | for invalid in patchtest_patterns.invalid_submitters: |
153 | if invalid.search_string(commit.author): | 169 | if invalid.search_string(commit.author): |
154 | self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit) | 170 | self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit) |
155 | 171 | ||
156 | def test_non_auh_upgrade(self): | 172 | def test_non_auh_upgrade(self): |
157 | for commit in self.commits: | 173 | for commit in self.commits: |
158 | if self.auh_email in commit.payload: | 174 | if patchtest_patterns.auh_email in commit.commit_message: |
159 | self.fail('Invalid author %s. Resend the series with a valid patch author' % self.auh_email, commit=commit) | 175 | self.fail( |
176 | "Invalid author %s. Resend the series with a valid patch author" | ||
177 | % patchtest_patterns.auh_email, | ||
178 | commit=commit, | ||
179 | ) | ||
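Two behavioural changes ride along with the pattern move in test_mbox.py: shortlog length is now measured after stripping any leading "[...]" series prefixes, and a new test_commit_message_user_tags rejects GitHub-style @username references in commit messages. The length normalization in outline; 90 is the value of the removed class-level maxlength, and the live limit is patchtest_patterns.mbox_shortlog_maxlength:

    import re

    MBOX_SHORTLOG_MAXLENGTH = 90  # value of the removed class attribute

    def shortlog_too_long(shortlog):
        # Strip "[OE-core][PATCH v2]"-style prefixes before measuring,
        # matching the re.sub() added in test_shortlog_length().
        stripped = re.sub(r'^(\[.*?\])+ ', '', shortlog)
        return len(stripped) > MBOX_SHORTLOG_MAXLENGTH

    assert not shortlog_too_long(
        "[OE-core][PATCH] selftest-hello: fix CVE-1234-56789")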
diff --git a/meta/lib/patchtest/tests/test_metadata.py b/meta/lib/patchtest/tests/test_metadata.py index f5dbcf01ed..2dee80b002 100644 --- a/meta/lib/patchtest/tests/test_metadata.py +++ b/meta/lib/patchtest/tests/test_metadata.py | |||
@@ -5,28 +5,16 @@ | |||
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | 6 | ||
7 | import base | 7 | import base |
8 | import collections | ||
8 | import os | 9 | import os |
10 | import patchtest_patterns | ||
9 | import pyparsing | 11 | import pyparsing |
10 | from data import PatchTestInput, PatchTestDataStore | 12 | from patchtest_parser import PatchtestParser |
13 | |||
14 | # Data store commonly used to share values between pre and post-merge tests | ||
15 | PatchTestDataStore = collections.defaultdict(str) | ||
11 | 16 | ||
12 | class TestMetadata(base.Metadata): | 17 | class TestMetadata(base.Metadata): |
13 | metadata_lic = 'LICENSE' | ||
14 | invalid_license = 'PATCHTESTINVALID' | ||
15 | metadata_chksum = 'LIC_FILES_CHKSUM' | ||
16 | license_var = 'LICENSE' | ||
17 | closed = 'CLOSED' | ||
18 | lictag_re = pyparsing.AtLineStart("License-Update:") | ||
19 | lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum) | ||
20 | lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum) | ||
21 | add_mark = pyparsing.Regex('\\+ ') | ||
22 | max_length = 200 | ||
23 | metadata_src_uri = 'SRC_URI' | ||
24 | md5sum = 'md5sum' | ||
25 | sha256sum = 'sha256sum' | ||
26 | git_regex = pyparsing.Regex('^git\\:\\/\\/.*') | ||
27 | metadata_summary = 'SUMMARY' | ||
28 | cve_check_ignore_var = 'CVE_CHECK_IGNORE' | ||
29 | cve_status_var = 'CVE_STATUS' | ||
30 | 18 | ||
31 | def test_license_presence(self): | 19 | def test_license_presence(self): |
32 | if not self.added: | 20 | if not self.added: |
@@ -41,13 +29,13 @@ class TestMetadata(base.Metadata): | |||
41 | open_flag = 'a' | 29 | open_flag = 'a' |
42 | with open(auto_conf, open_flag) as fd: | 30 | with open(auto_conf, open_flag) as fd: |
43 | for pn in self.added: | 31 | for pn in self.added: |
44 | fd.write('LICENSE ??= "%s"\n' % self.invalid_license) | 32 | fd.write('LICENSE ??= "%s"\n' % patchtest_patterns.invalid_license) |
45 | 33 | ||
46 | no_license = False | 34 | no_license = False |
47 | for pn in self.added: | 35 | for pn in self.added: |
48 | rd = self.tinfoil.parse_recipe(pn) | 36 | rd = self.tinfoil.parse_recipe(pn) |
49 | license = rd.getVar(self.metadata_lic) | 37 | license = rd.getVar(patchtest_patterns.metadata_lic) |
50 | if license == self.invalid_license: | 38 | if license == patchtest_patterns.invalid_license: |
51 | no_license = True | 39 | no_license = True |
52 | break | 40 | break |
53 | 41 | ||
@@ -74,11 +62,13 @@ class TestMetadata(base.Metadata): | |||
74 | # we are not interested in images | 62 | # we are not interested in images |
75 | if '/images/' in pathname: | 63 | if '/images/' in pathname: |
76 | continue | 64 | continue |
77 | lic_files_chksum = rd.getVar(self.metadata_chksum) | 65 | lic_files_chksum = rd.getVar(patchtest_patterns.metadata_chksum) |
78 | if rd.getVar(self.license_var) == self.closed: | 66 | if rd.getVar(patchtest_patterns.license_var) == patchtest_patterns.closed: |
79 | continue | 67 | continue |
80 | if not lic_files_chksum: | 68 | if not lic_files_chksum: |
81 | self.fail('%s is missing in newly added recipe' % self.metadata_chksum) | 69 | self.fail( |
70 | "%s is missing in newly added recipe" % patchtest_patterns.metadata_chksum | ||
71 | ) | ||
82 | 72 | ||
83 | def test_lic_files_chksum_modified_not_mentioned(self): | 73 | def test_lic_files_chksum_modified_not_mentioned(self): |
84 | if not self.modified: | 74 | if not self.modified: |
@@ -89,11 +79,13 @@ class TestMetadata(base.Metadata): | |||
89 | if patch.path.endswith('.patch'): | 79 | if patch.path.endswith('.patch'): |
90 | continue | 80 | continue |
91 | payload = str(patch) | 81 | payload = str(patch) |
92 | if (self.lic_chksum_added.search_string(payload) or self.lic_chksum_removed.search_string(payload)): | 82 | if patchtest_patterns.lic_chksum_added.search_string( |
83 | payload | ||
84 | ) or patchtest_patterns.lic_chksum_removed.search_string(payload): | ||
93 | # if any patch on the series contain reference on the metadata, fail | 85 | # if any patch on the series contain reference on the metadata, fail |
94 | for commit in self.commits: | 86 | for commit in self.commits: |
95 | if self.lictag_re.search_string(commit.commit_message): | 87 | if patchtest_patterns.lictag_re.search_string(commit.commit_message): |
96 | break | 88 | break |
97 | else: | 89 | else: |
98 | self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message') | 90 | self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message') |
99 | 91 | ||
@@ -104,16 +96,22 @@ class TestMetadata(base.Metadata): | |||
104 | continue | 96 | continue |
105 | payload = str(patch) | 97 | payload = str(patch) |
106 | for line in payload.splitlines(): | 98 | for line in payload.splitlines(): |
107 | if self.add_mark.search_string(line): | 99 | if patchtest_patterns.add_mark.search_string(line): |
108 | current_line_length = len(line[1:]) | 100 | current_line_length = len(line[1:]) |
109 | if current_line_length > self.max_length: | 101 | if current_line_length > patchtest_patterns.patch_max_line_length: |
110 | self.fail('Patch line too long (current length %s, maximum is %s)' % (current_line_length, self.max_length), | 102 | self.fail( |
111 | data=[('Patch', patch.path), ('Line', '%s ...' % line[0:80])]) | 103 | "Patch line too long (current length %s, maximum is %s)" |
104 | % (current_line_length, patchtest_patterns.patch_max_line_length), | ||
105 | data=[ | ||
106 | ("Patch", patch.path), | ||
107 | ("Line", "%s ..." % line[0:80]), | ||
108 | ], | ||
109 | ) | ||
112 | 110 | ||
113 | def pretest_src_uri_left_files(self): | 111 | def pretest_src_uri_left_files(self): |
114 | # these tests just make sense on patches that can be merged | 112 | # these tests just make sense on patches that can be merged |
115 | if not PatchTestInput.repo.canbemerged: | 113 | if not PatchtestParser.repo.canbemerged: |
116 | self.skip('Patch cannot be merged') | 114 | self.skip("Patch cannot be merged") |
117 | if not self.modified: | 115 | if not self.modified: |
118 | self.skip('No modified recipes, skipping pretest') | 116 | self.skip('No modified recipes, skipping pretest') |
119 | 117 | ||
@@ -123,12 +121,14 @@ class TestMetadata(base.Metadata): | |||
123 | if 'core-image' in pn: | 121 | if 'core-image' in pn: |
124 | continue | 122 | continue |
125 | rd = self.tinfoil.parse_recipe(pn) | 123 | rd = self.tinfoil.parse_recipe(pn) |
126 | PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri) | 124 | PatchTestDataStore[ |
125 | "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn) | ||
126 | ] = rd.getVar(patchtest_patterns.metadata_src_uri) | ||
127 | 127 | ||
128 | def test_src_uri_left_files(self): | 128 | def test_src_uri_left_files(self): |
129 | # these tests just make sense on patches that can be merged | 129 | # these tests just make sense on patches that can be merged |
130 | if not PatchTestInput.repo.canbemerged: | 130 | if not PatchtestParser.repo.canbemerged: |
131 | self.skip('Patch cannot be merged') | 131 | self.skip("Patch cannot be merged") |
132 | if not self.modified: | 132 | if not self.modified: |
133 | self.skip('No modified recipes, skipping pretest') | 133 | self.skip('No modified recipes, skipping pretest') |
134 | 134 | ||
@@ -138,11 +138,17 @@ class TestMetadata(base.Metadata): | |||
138 | if 'core-image' in pn: | 138 | if 'core-image' in pn: |
139 | continue | 139 | continue |
140 | rd = self.tinfoil.parse_recipe(pn) | 140 | rd = self.tinfoil.parse_recipe(pn) |
141 | PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri) | 141 | PatchTestDataStore[ |
142 | "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn) | ||
143 | ] = rd.getVar(patchtest_patterns.metadata_src_uri) | ||
142 | 144 | ||
143 | for pn in self.modified: | 145 | for pn in self.modified: |
144 | pretest_src_uri = PatchTestDataStore['pre%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split() | 146 | pretest_src_uri = PatchTestDataStore[ |
145 | test_src_uri = PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split() | 147 | "pre%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn) |
148 | ].split() | ||
149 | test_src_uri = PatchTestDataStore[ | ||
150 | "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn) | ||
151 | ].split() | ||
146 | 152 | ||
147 | pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')]) | 153 | pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')]) |
148 | test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')]) | 154 | test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')]) |
@@ -175,23 +181,32 @@ class TestMetadata(base.Metadata): | |||
175 | if 'core-image' in pn: | 181 | if 'core-image' in pn: |
176 | continue | 182 | continue |
177 | rd = self.tinfoil.parse_recipe(pn) | 183 | rd = self.tinfoil.parse_recipe(pn) |
178 | summary = rd.getVar(self.metadata_summary) | 184 | summary = rd.getVar(patchtest_patterns.metadata_summary) |
179 | 185 | ||
180 | # "${PN} version ${PN}-${PR}" is the default, so fail if default | 186 | # "${PN} version ${PN}-${PR}" is the default, so fail if default |
181 | if summary.startswith('%s version' % pn): | 187 | if summary.startswith("%s version" % pn): |
182 | self.fail('%s is missing in newly added recipe' % self.metadata_summary) | 188 | self.fail( |
189 | "%s is missing in newly added recipe" % patchtest_patterns.metadata_summary | ||
190 | ) | ||
183 | 191 | ||
184 | def test_cve_check_ignore(self): | 192 | def test_cve_check_ignore(self): |
185 | # Skip if we neither modified a recipe or target branches are not | 193 | # Skip if we neither modified a recipe or target branches are not |
186 | # Nanbield and newer. CVE_CHECK_IGNORE was first deprecated in Nanbield. | 194 | # Nanbield and newer. CVE_CHECK_IGNORE was first deprecated in Nanbield. |
187 | if not self.modified or PatchTestInput.repo.branch == "kirkstone" or PatchTestInput.repo.branch == "dunfell": | 195 | if ( |
188 | self.skip('No modified recipes or older target branch, skipping test') | 196 | not self.modified |
197 | or PatchtestParser.repo.patch.branch == "kirkstone" | ||
198 | or PatchtestParser.repo.patch.branch == "dunfell" | ||
199 | ): | ||
200 | self.skip("No modified recipes or older target branch, skipping test") | ||
189 | for pn in self.modified: | 201 | for pn in self.modified: |
190 | # we are not interested in images | 202 | # we are not interested in images |
191 | if 'core-image' in pn: | 203 | if 'core-image' in pn: |
192 | continue | 204 | continue |
193 | rd = self.tinfoil.parse_recipe(pn) | 205 | rd = self.tinfoil.parse_recipe(pn) |
194 | cve_check_ignore = rd.getVar(self.cve_check_ignore_var) | 206 | cve_check_ignore = rd.getVar(patchtest_patterns.cve_check_ignore_var) |
195 | 207 | ||
196 | if cve_check_ignore is not None: | 208 | if cve_check_ignore is not None: |
197 | self.fail('%s is deprecated and should be replaced by %s' % (self.cve_check_ignore_var, self.cve_status_var)) | 209 | self.fail( |
210 | "%s is deprecated and should be replaced by %s" | ||
211 | % (patchtest_patterns.cve_check_ignore_var, patchtest_patterns.cve_status_var) | ||
212 | ) | ||
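test_metadata.py now owns the PatchTestDataStore that previously came from the data module: a plain defaultdict, keyed by test id, variable name, and recipe, written by pretest_src_uri_left_files before the series is merged and read back by test_src_uri_left_files afterwards. In outline, with the key shape taken from the hunks above and helper names invented for illustration:

    import collections
    import os

    PatchTestDataStore = collections.defaultdict(str)

    def record_pretest_src_uri(testid, pn, src_uri):
        PatchTestDataStore["pre%s-SRC_URI-%s" % (testid, pn)] = src_uri

    def local_files_removed(testid, pn, src_uri_after):
        # Compare the file:// entries before and after the series applies.
        before = set(PatchTestDataStore["pre%s-SRC_URI-%s" % (testid, pn)].split())
        after = set(src_uri_after.split())
        return {os.path.basename(u) for u in before - after
                if u.startswith("file://")}

    record_pretest_src_uri("t1", "selftest-hello",
                           "file://helloworld.c file://CVE-1234-56789.patch")
    assert local_files_removed("t1", "selftest-hello",
                               "file://helloworld.c") == {"CVE-1234-56789.patch"}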
diff --git a/meta/lib/patchtest/tests/test_patch.py b/meta/lib/patchtest/tests/test_patch.py index d7187a0cb1..d08b8a5019 100644 --- a/meta/lib/patchtest/tests/test_patch.py +++ b/meta/lib/patchtest/tests/test_patch.py | |||
@@ -7,16 +7,11 @@ | |||
7 | 7 | ||
8 | import base | 8 | import base |
9 | import os | 9 | import os |
10 | import parse_signed_off_by | 10 | import patchtest_patterns |
11 | import parse_upstream_status | ||
12 | import pyparsing | 11 | import pyparsing |
13 | 12 | ||
14 | class TestPatch(base.Base): | 13 | class TestPatch(base.Base): |
15 | 14 | ||
16 | re_cve_pattern = pyparsing.Regex("CVE\-\d{4}\-\d+") | ||
17 | re_cve_payload_tag = pyparsing.Regex("\+CVE:(\s+CVE\-\d{4}\-\d+)+") | ||
18 | upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status") | ||
19 | |||
20 | @classmethod | 15 | @classmethod |
21 | def setUpClassLocal(cls): | 16 | def setUpClassLocal(cls): |
22 | cls.newpatches = [] | 17 | cls.newpatches = [] |
@@ -25,17 +20,17 @@ class TestPatch(base.Base): | |||
25 | if patch.path.endswith('.patch') and patch.is_added_file: | 20 | if patch.path.endswith('.patch') and patch.is_added_file: |
26 | cls.newpatches.append(patch) | 21 | cls.newpatches.append(patch) |
27 | 22 | ||
28 | cls.mark = str(parse_signed_off_by.signed_off_by_mark).strip('"') | 23 | cls.mark = str(patchtest_patterns.signed_off_by_prefix).strip('"') |
29 | 24 | ||
30 | # match PatchSignedOffBy.mark with '+' preceding it | 25 | # match PatchSignedOffBy.mark with '+' preceding it |
31 | cls.prog = parse_signed_off_by.patch_signed_off_by | 26 | cls.prog = patchtest_patterns.patch_signed_off_by |
32 | 27 | ||
33 | def setUp(self): | 28 | def setUp(self): |
34 | if self.unidiff_parse_error: | 29 | if self.unidiff_parse_error: |
35 | self.skip('Parse error %s' % self.unidiff_parse_error) | 30 | self.skip('Parse error %s' % self.unidiff_parse_error) |
36 | 31 | ||
37 | self.valid_status = ', '.join(parse_upstream_status.upstream_status_nonliteral_valid_status) | 32 | self.valid_status = ", ".join(patchtest_patterns.upstream_status_nonliteral_valid_status) |
38 | self.standard_format = 'Upstream-Status: <Valid status>' | 33 | self.standard_format = "Upstream-Status: <Valid status>" |
39 | 34 | ||
40 | # we are just interested in series that introduce CVE patches, thus discard other | 35 | # we are just interested in series that introduce CVE patches, thus discard other |
41 | # possibilities: modification to current CVEs, patch directly introduced into the | 36 | # possibilities: modification to current CVEs, patch directly introduced into the |
@@ -50,31 +45,62 @@ class TestPatch(base.Base): | |||
50 | 45 | ||
51 | for newpatch in TestPatch.newpatches: | 46 | for newpatch in TestPatch.newpatches: |
52 | payload = newpatch.__str__() | 47 | payload = newpatch.__str__() |
53 | if not self.upstream_status_regex.search_string(payload): | 48 | if not patchtest_patterns.upstream_status_regex.search_string(payload): |
54 | self.fail('Added patch file is missing Upstream-Status: <Valid status> in the commit message', | 49 | self.fail( |
55 | data=[('Standard format', self.standard_format), ('Valid status', self.valid_status)]) | 50 | "Added patch file is missing Upstream-Status: <Valid status> in the commit message", |
51 | data=[ | ||
52 | ("Standard format", self.standard_format), | ||
53 | ("Valid status", self.valid_status), | ||
54 | ], | ||
55 | ) | ||
56 | for line in payload.splitlines(): | 56 | for line in payload.splitlines(): |
57 | if self.patchmetadata_regex.match(line): | 57 | if patchtest_patterns.patchmetadata_regex.match(line): |
58 | continue | 58 | continue |
59 | if self.upstream_status_regex.search_string(line): | 59 | if patchtest_patterns.upstream_status_regex.search_string(line): |
60 | if parse_upstream_status.inappropriate_status_mark.searchString(line): | 60 | if patchtest_patterns.inappropriate.searchString(line): |
61 | try: | 61 | try: |
62 | parse_upstream_status.upstream_status_inappropriate_info.parseString(line.lstrip('+')) | 62 | patchtest_patterns.upstream_status_inappropriate_info.parseString( |
63 | except pyparsing.ParseException as pe: | 63 | line.lstrip("+") |
64 | self.fail('Upstream-Status is Inappropriate, but no reason was provided', | 64 | ) |
65 | data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Inappropriate [reason]')]) | 65 | except pyparsing.ParseException as pe: |
66 | elif parse_upstream_status.submitted_status_mark.searchString(line): | 66 | self.fail( |
67 | try: | 67 | "Upstream-Status is Inappropriate, but no reason was provided", |
68 | parse_upstream_status.upstream_status_submitted_info.parseString(line.lstrip('+')) | 68 | data=[ |
69 | except pyparsing.ParseException as pe: | 69 | ("Current", pe.pstr), |
70 | self.fail('Upstream-Status is Submitted, but it is not mentioned where', | 70 | ( |
71 | data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Submitted [where]')]) | 71 | "Standard format", |
72 | else: | 72 | "Upstream-Status: Inappropriate [reason]", |
73 | try: | 73 | ), |
74 | parse_upstream_status.upstream_status.parseString(line.lstrip('+')) | 74 | ], |
75 | except pyparsing.ParseException as pe: | 75 | ) |
76 | self.fail('Upstream-Status is in incorrect format', | 76 | elif patchtest_patterns.submitted.searchString(line): |
77 | data=[('Current', pe.pstr), ('Standard format', self.standard_format), ('Valid status', self.valid_status)]) | 77 | try: |
78 | patchtest_patterns.upstream_status_submitted_info.parseString( | ||
79 | line.lstrip("+") | ||
80 | ) | ||
81 | except pyparsing.ParseException as pe: | ||
82 | self.fail( | ||
83 | "Upstream-Status is Submitted, but it is not mentioned where", | ||
84 | data=[ | ||
85 | ("Current", pe.pstr), | ||
86 | ( | ||
87 | "Standard format", | ||
88 | "Upstream-Status: Submitted [where]", | ||
89 | ), | ||
90 | ], | ||
91 | ) | ||
92 | else: | ||
93 | try: | ||
94 | patchtest_patterns.upstream_status.parseString(line.lstrip("+")) | ||
95 | except pyparsing.ParseException as pe: | ||
96 | self.fail( | ||
97 | "Upstream-Status is in incorrect format", | ||
98 | data=[ | ||
99 | ("Current", pe.pstr), | ||
100 | ("Standard format", self.standard_format), | ||
101 | ("Valid status", self.valid_status), | ||
102 | ], | ||
103 | ) | ||
78 | 104 | ||
79 | def test_signed_off_by_presence(self): | 105 | def test_signed_off_by_presence(self): |
80 | if not TestPatch.newpatches: | 106 | if not TestPatch.newpatches: |
@@ -83,7 +109,7 @@ class TestPatch(base.Base): | |||
83 | for newpatch in TestPatch.newpatches: | 109 | for newpatch in TestPatch.newpatches: |
84 | payload = newpatch.__str__() | 110 | payload = newpatch.__str__() |
85 | for line in payload.splitlines(): | 111 | for line in payload.splitlines(): |
86 | if self.patchmetadata_regex.match(line): | 112 | if patchtest_patterns.patchmetadata_regex.match(line): |
87 | continue | 113 | continue |
88 | if TestPatch.prog.search_string(payload): | 114 | if TestPatch.prog.search_string(payload): |
89 | break | 115 | break |
@@ -92,10 +118,12 @@ class TestPatch(base.Base): | |||
92 | 118 | ||
93 | def test_cve_tag_format(self): | 119 | def test_cve_tag_format(self): |
94 | for commit in TestPatch.commits: | 120 | for commit in TestPatch.commits: |
95 | if self.re_cve_pattern.search_string(commit.shortlog) or self.re_cve_pattern.search_string(commit.commit_message): | 121 | if patchtest_patterns.cve.search_string( |
122 | commit.shortlog | ||
123 | ) or patchtest_patterns.cve.search_string(commit.commit_message): | ||
96 | tag_found = False | 124 | tag_found = False |
97 | for line in commit.payload.splitlines(): | 125 | for line in commit.payload.splitlines(): |
98 | if self.re_cve_payload_tag.search_string(line): | 126 | if patchtest_patterns.cve_payload_tag.search_string(line): |
99 | tag_found = True | 127 | tag_found = True |
100 | break | 128 | break |
101 | if not tag_found: | 129 | if not tag_found: |
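This hunk consolidates the patterns that the deleted class attributes and the old parse_signed_off_by/parse_upstream_status modules provided into a single shared patchtest_patterns module. The real definitions live in meta/lib/patchtest/patchtest_patterns.py, which this diff excerpt does not show; the sketch below merely reconstructs a few exports from the deleted attributes and from the names the tests now reference (cve, cve_payload_tag, upstream_status_regex, signed_off_by_prefix), so treat it as an assumption rather than the module's actual contents:

    import pyparsing

    # patchtest_patterns (sketch) -- reconstructed from the deleted
    # TestPatch attributes, not copied from the real module. Raw strings
    # avoid the invalid "\-" escape warnings the old non-raw literals raised.
    cve = pyparsing.Regex(r"CVE-\d{4}-\d+")
    cve_payload_tag = pyparsing.Regex(r"\+CVE:(\s+CVE-\d{4}-\d+)+")
    upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status")
    # assumed to be a Literal: the test strips the double quotes that
    # str() puts around a pyparsing Literal's match text
    signed_off_by_prefix = pyparsing.Literal("Signed-off-by:")

Sharing one module means setUpClassLocal() and the payload loops consult the same compiled patterns instead of two copies that could drift apart.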
diff --git a/meta/lib/patchtest/tests/test_python_pylint.py b/meta/lib/patchtest/tests/test_python_pylint.py index ef315e591c..ec9129bc79 100644 --- a/meta/lib/patchtest/tests/test_python_pylint.py +++ b/meta/lib/patchtest/tests/test_python_pylint.py | |||
@@ -6,7 +6,7 @@ | |||
6 | 6 | ||
7 | import base | 7 | import base |
8 | from io import StringIO | 8 | from io import StringIO |
9 | from data import PatchTestInput | 9 | from patchtest_parser import PatchtestParser |
10 | from pylint.reporters.text import TextReporter | 10 | from pylint.reporters.text import TextReporter |
11 | import pylint.lint as lint | 11 | import pylint.lint as lint |
12 | 12 | ||
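The only change to test_python_pylint.py is the parser import rename (data.PatchTestInput becomes patchtest_parser.PatchtestParser); the pylint plumbing itself is untouched. For context, the StringIO/TextReporter imports shown here support the standard pattern of capturing pylint's report in memory rather than on stdout. A minimal standalone sketch, with "some_file.py" as a placeholder target rather than anything taken from this test:

    from io import StringIO

    import pylint.lint as lint
    from pylint.reporters.text import TextReporter

    output = StringIO()
    # exit=False keeps pylint from calling sys.exit() once the run finishes
    lint.Run(["some_file.py"], reporter=TextReporter(output), exit=False)
    print(output.getvalue())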
diff --git a/meta/lib/patchtest/utils.py b/meta/lib/patchtest/utils.py deleted file mode 100644 index 8eddf3e85f..0000000000 --- a/meta/lib/patchtest/utils.py +++ /dev/null | |||
@@ -1,61 +0,0 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | # | ||
4 | # utils: common methods used by the patchtest framework | ||
5 | # | ||
6 | # Copyright (C) 2016 Intel Corporation | ||
7 | # | ||
8 | # SPDX-License-Identifier: GPL-2.0-only | ||
9 | # | ||
10 | |||
11 | import os | ||
12 | import subprocess | ||
13 | import logging | ||
14 | import re | ||
15 | import mailbox | ||
16 | |||
17 | def logger_create(name): | ||
18 | logger = logging.getLogger(name) | ||
19 | loggerhandler = logging.StreamHandler() | ||
20 | loggerhandler.setFormatter(logging.Formatter("%(message)s")) | ||
21 | logger.addHandler(loggerhandler) | ||
22 | logger.setLevel(logging.INFO) | ||
23 | return logger | ||
24 | |||
25 | def valid_branch(branch): | ||
26 | """ Check if branch is valid name """ | ||
27 | lbranch = branch.lower() | ||
28 | |||
29 | invalid = lbranch.startswith('patch') or \ | ||
30 | lbranch.startswith('rfc') or \ | ||
31 | lbranch.startswith('resend') or \ | ||
32 | re.search(r'^v\d+', lbranch) or \ | ||
33 | re.search(r'^\d+/\d+', lbranch) | ||
34 | |||
35 | return not invalid | ||
36 | |||
37 | def get_branch(path): | ||
38 | """ Get the branch name from mbox """ | ||
39 | fullprefix = "" | ||
40 | mbox = mailbox.mbox(path) | ||
41 | |||
42 | if len(mbox): | ||
43 | subject = mbox[0]['subject'] | ||
44 | if subject: | ||
45 | pattern = re.compile(r"(\[.*\])", re.DOTALL) | ||
46 | match = pattern.search(subject) | ||
47 | if match: | ||
48 | fullprefix = match.group(1) | ||
49 | |||
50 | branch, branches, valid_branches = None, [], [] | ||
51 | |||
52 | if fullprefix: | ||
53 | prefix = fullprefix.strip('[]') | ||
54 | branches = [ b.strip() for b in prefix.split(',')] | ||
55 | valid_branches = [b for b in branches if valid_branch(b)] | ||
56 | |||
57 | if len(valid_branches): | ||
58 | branch = valid_branches[0] | ||
59 | |||
60 | return branch | ||
61 | |||
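utils.py is deleted outright, taking the mbox branch-prefix heuristic with it. As a reference for what is being removed, a usage sketch traced from the code above, assuming the module were still importable as utils; "/path/to/series.mbox" is a placeholder:

    from utils import get_branch, valid_branch  # module removed by this commit

    # valid_branch() filters out series markers rather than validating git refs:
    assert valid_branch("kirkstone")       # ordinary branch name passes
    assert not valid_branch("PATCH")       # lowercased, startswith('patch')
    assert not valid_branch("RFC")         # startswith('rfc')
    assert not valid_branch("v2")          # matches ^v\d+
    assert not valid_branch("1/3")         # matches ^\d+/\d+

    # get_branch() returns the first comma-separated token of the first
    # subject's [...] prefix that survives valid_branch(), e.g.:
    #   "[PATCH 1/3] foo: fix bar"         -> None
    #   "[kirkstone] libfoo: backport fix" -> "kirkstone"
    #   "[kirkstone,PATCH v2] libfoo: fix" -> "kirkstone"
    branch = get_branch("/path/to/series.mbox")
    print(branch or "no branch prefix found")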