Diffstat (limited to 'scripts/contrib')
23 files changed, 1033 insertions, 253 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh index e7bd129e9e..6672189c95 100755 --- a/scripts/contrib/bb-perf/bb-matrix-plot.sh +++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh | |||
@@ -16,8 +16,8 @@ | |||
16 | 16 | ||
17 | # Setup the defaults | 17 | # Setup the defaults |
18 | DATFILE="bb-matrix.dat" | 18 | DATFILE="bb-matrix.dat" |
19 | XLABEL="BB_NUMBER_THREADS" | 19 | XLABEL="BB\\\\_NUMBER\\\\_THREADS" |
20 | YLABEL="PARALLEL_MAKE" | 20 | YLABEL="PARALLEL\\\\_MAKE" |
21 | FIELD=3 | 21 | FIELD=3 |
22 | DEF_TITLE="Elapsed Time (seconds)" | 22 | DEF_TITLE="Elapsed Time (seconds)" |
23 | PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100" | 23 | PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100" |
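The quadrupled backslashes survive two rounds of unescaping: the shell stores BB\\_NUMBER\\_THREADS, and gnuplot unescapes double-quoted strings once more, so its enhanced text mode sees \_ and prints a literal underscore instead of starting a subscript. A minimal sketch of the quoting (not part of the commit):

    XLABEL="BB\\\\_NUMBER\\\\_THREADS"
    printf '%s\n' "$XLABEL"    # -> BB\\_NUMBER\\_THREADS; gnuplot's own string
                               #    unescaping then leaves BB\_NUMBER\_THREADS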
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py index 090133600b..a9cdf082ab 100755 --- a/scripts/contrib/bbvars.py +++ b/scripts/contrib/bbvars.py | |||
@@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars): | |||
36 | def collect_documented_vars(docfiles): | 36 | def collect_documented_vars(docfiles): |
37 | ''' Walk the docfiles and collect the documented variables ''' | 37 | ''' Walk the docfiles and collect the documented variables ''' |
38 | documented_vars = [] | 38 | documented_vars = [] |
39 | prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-") | 39 | prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-") |
40 | var_prog = re.compile('<glossentry id=\'var-(.*)\'>') | 40 | var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>') |
41 | for d in docfiles: | 41 | for d in docfiles: |
42 | with open(d) as f: | 42 | with open(d) as f: |
43 | documented_vars += var_prog.findall(f.read()) | 43 | documented_vars += var_prog.findall(f.read()) |
@@ -45,7 +45,7 @@ def collect_documented_vars(docfiles): | |||
45 | return documented_vars | 45 | return documented_vars |
46 | 46 | ||
47 | def bbvar_doctag(var, docconf): | 47 | def bbvar_doctag(var, docconf): |
48 | prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var)) | 48 | prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var)) |
49 | if docconf == "": | 49 | if docconf == "": |
50 | return "?" | 50 | return "?" |
51 | 51 | ||
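The regex changes above only add the r prefix; the patterns themselves are unchanged. Raw strings stop Python from treating sequences such as \[ as string escapes, which current interpreters warn about and future ones may reject. A quick demonstration, assuming python3 is on PATH (the exact warning class varies by Python version):

    python3 -W error -c 's = "\[doc\]"'    # rejected: invalid escape sequence '\['
    python3 -W error -c 's = r"\[doc\]"'   # fine: backslashes reach re.compile intact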
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh index fa71d4a2e9..0a85e6e708 100755 --- a/scripts/contrib/build-perf-test-wrapper.sh +++ b/scripts/contrib/build-perf-test-wrapper.sh | |||
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then | |||
87 | exit 1 | 87 | exit 1 |
88 | fi | 88 | fi |
89 | 89 | ||
90 | if [ -n "$email_to" ]; then | ||
91 | if ! [ -x "$(command -v phantomjs)" ]; then | ||
92 | echo "ERROR: Sending email needs phantomjs." | ||
93 | exit 1 | ||
94 | fi | ||
95 | if ! [ -x "$(command -v optipng)" ]; then | ||
96 | echo "ERROR: Sending email needs optipng." | ||
97 | exit 1 | ||
98 | fi | ||
99 | fi | ||
100 | |||
101 | # Open a file descriptor for flock and acquire lock | 90 | # Open a file descriptor for flock and acquire lock |
102 | LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" | 91 | LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" |
103 | if ! exec 3> "$LOCK_FILE"; then | 92 | if ! exec 3> "$LOCK_FILE"; then |
104 | echo "ERROR: Unable to open lock file" | 93 | echo "ERROR: Unable to open lock file" |
105 | exit 1 | 94 | exit 1 |
106 | fi | 95 | fi |
107 | if ! flock -n 3; then | 96 | if ! flock -n 3; then |
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then | |||
226 | if [ -n "$email_to" ]; then | 215 | if [ -n "$email_to" ]; then |
227 | echo "Emailing test report" | 216 | echo "Emailing test report" |
228 | os_name=`get_os_release_var PRETTY_NAME` | 217 | os_name=`get_os_release_var PRETTY_NAME` |
229 | "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" | 218 | "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" |
230 | fi | 219 | fi |
231 | 220 | ||
232 | # Upload report files, unless we're on detached head | 221 | # Upload report files, unless we're on detached head |
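The wrapper serializes runs by holding file descriptor 3 open on a lock file; flock -n fails immediately if another instance holds the lock. The idiom, sketched with a hypothetical path:

    LOCK_FILE="/tmp/example.lock"
    exec 3> "$LOCK_FILE" || exit 1    # open fd 3 for writing (creates the file)
    flock -n 3 || exit 1              # non-blocking: bail out if already locked
    # ... do the work; the lock is released when fd 3 closes at process exit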
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py index 4d41a4c475..c69acb4095 100755 --- a/scripts/contrib/convert-overrides.py +++ b/scripts/contrib/convert-overrides.py | |||
@@ -22,66 +22,78 @@ import sys | |||
22 | import tempfile | 22 | import tempfile |
23 | import shutil | 23 | import shutil |
24 | import mimetypes | 24 | import mimetypes |
25 | import argparse | ||
25 | 26 | ||
26 | if len(sys.argv) < 2: | 27 | parser = argparse.ArgumentParser(description="Convert override syntax") |
27 | print("Please specify a directory to run the conversion script against.") | 28 | parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros)") |
28 | sys.exit(1) | 29 | parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override") |
30 | parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')") | ||
31 | parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables") | ||
32 | parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables") | ||
33 | parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides") | ||
34 | parser.add_argument("path", nargs="+", help="Paths to convert") | ||
35 | |||
36 | args = parser.parse_args() | ||
29 | 37 | ||
30 | # List of strings to treat as overrides | 38 | # List of strings to treat as overrides |
31 | vars = ["append", "prepend", "remove"] | 39 | vars = args.override |
32 | vars = vars + ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"] | 40 | vars += ["append", "prepend", "remove"] |
33 | vars = vars + ["genericx86", "edgerouter", "beaglebone-yocto"] | 41 | vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"] |
34 | vars = vars + ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"] | 42 | vars += ["genericx86", "edgerouter", "beaglebone-yocto"] |
35 | vars = vars + ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"] | 43 | vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"] |
36 | vars = vars + ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"] | 44 | vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"] |
37 | vars = vars + ["tune-", "pn-", "forcevariable"] | 45 | vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"] |
38 | vars = vars + ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"] | 46 | vars += ["tune-", "pn-", "forcevariable"] |
39 | vars = vars + ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"] | 47 | vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"] |
40 | vars = vars + ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"] | 48 | vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"] |
41 | vars = vars + ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"] | 49 | vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"] |
42 | vars = vars + ["linux-gnueabi", "eabi"] | 50 | vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"] |
43 | vars = vars + ["virtclass-multilib", "virtclass-mcextend"] | 51 | vars += ["linux-gnueabi", "eabi"] |
52 | vars += ["virtclass-multilib", "virtclass-mcextend"] | ||
44 | 53 | ||
45 | # List of strings to treat as overrides but only with whitespace following or another override (more restricted matching). | 54 | # List of strings to treat as overrides but only with whitespace following or another override (more restricted matching). |
46 | # Handles issues with arc matching arch. | 55 | # Handles issues with arc matching arch. |
47 | shortvars = ["arc", "mips", "mipsel", "sh4"] | 56 | shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override |
48 | 57 | ||
49 | # Variables which take packagenames as an override | 58 | # Variables which take packagenames as an override |
50 | packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY", | 59 | packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY", |
51 | "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE", | 60 | "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE", |
52 | "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst", | 61 | "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst", |
53 | "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA", | 62 | "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA", |
54 | "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] | 63 | "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars |
55 | 64 | ||
56 | # Expressions to skip if encountered, these are not overrides | 65 | # Expressions to skip if encountered, these are not overrides |
57 | skips = ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"] | 66 | skips = args.skip |
58 | skips = skips + ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"] | 67 | skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"] |
59 | skips = skips + ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"] | 68 | skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"] |
60 | skips = skips + ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"] | 69 | skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"] |
61 | skips = skips + ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"] | 70 | skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"] |
62 | skips = skips + ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"] | 71 | skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"] |
63 | skips = skips + ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"] | 72 | skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"] |
64 | skips = skips + ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"] | 73 | skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"] |
65 | skips = skips + ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"] | 74 | skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"] |
66 | 75 | skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"] | |
67 | imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] | 76 | |
68 | packagevars = packagevars + imagevars | 77 | imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars |
78 | packagevars += imagevars | ||
79 | |||
80 | skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext | ||
69 | 81 | ||
70 | vars_re = {} | 82 | vars_re = {} |
71 | for exp in vars: | 83 | for exp in vars: |
72 | vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp) | 84 | vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp) |
73 | 85 | ||
74 | shortvars_re = {} | 86 | shortvars_re = {} |
75 | for exp in shortvars: | 87 | for exp in shortvars: |
76 | shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3") | 88 | shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3") |
77 | 89 | ||
78 | package_re = {} | 90 | package_re = {} |
79 | for exp in packagevars: | 91 | for exp in packagevars: |
80 | package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2") | 92 | package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2") |
81 | 93 | ||
82 | # Other substitutions to make | 94 | # Other substitutions to make |
83 | subs = { | 95 | subs = { |
84 | 'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")', | 96 | 'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")', |
85 | "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))", | 97 | "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))", |
86 | "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))", | 98 | "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))", |
87 | "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))", | 99 | "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))", |
@@ -124,21 +136,20 @@ def processfile(fn): | |||
124 | ourname = os.path.basename(sys.argv[0]) | 136 | ourname = os.path.basename(sys.argv[0]) |
125 | ourversion = "0.9.3" | 137 | ourversion = "0.9.3" |
126 | 138 | ||
127 | if os.path.isfile(sys.argv[1]): | 139 | for p in args.path: |
128 | processfile(sys.argv[1]) | 140 | if os.path.isfile(p): |
129 | sys.exit(0) | 141 | processfile(p) |
130 | 142 | else: | |
131 | for targetdir in sys.argv[1:]: | 143 | print("processing directory '%s'" % p) |
132 | print("processing directory '%s'" % targetdir) | 144 | for root, dirs, files in os.walk(p): |
133 | for root, dirs, files in os.walk(targetdir): | 145 | for name in files: |
134 | for name in files: | 146 | if name == ourname: |
135 | if name == ourname: | 147 | continue |
136 | continue | 148 | fn = os.path.join(root, name) |
137 | fn = os.path.join(root, name) | 149 | if os.path.islink(fn): |
138 | if os.path.islink(fn): | 150 | continue |
139 | continue | 151 | if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext): |
140 | if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"): | 152 | continue |
141 | continue | 153 | processfile(fn) |
142 | processfile(fn) | ||
143 | 154 | ||
144 | print("All files processed with version %s" % ourversion) | 155 | print("All files processed with version %s" % ourversion) |
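With the argparse conversion, layer-specific overrides, skips, and file suffixes no longer require editing the script. A hypothetical invocation (all option values are examples):

    scripts/contrib/convert-overrides.py \
        --override mymachine --override mydistro \
        --skip my_custom_append --skip-ext .rst \
        meta-mylayer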
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py new file mode 100755 index 0000000000..13cf12a33f --- /dev/null +++ b/scripts/contrib/convert-spdx-licenses.py | |||
@@ -0,0 +1,145 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Conversion script to change LICENSE entries to SPDX identifiers | ||
4 | # | ||
5 | # Copyright (C) 2021-2022 Richard Purdie | ||
6 | # | ||
7 | # SPDX-License-Identifier: GPL-2.0-only | ||
8 | # | ||
9 | |||
10 | import re | ||
11 | import os | ||
12 | import sys | ||
13 | import tempfile | ||
14 | import shutil | ||
15 | import mimetypes | ||
16 | |||
17 | if len(sys.argv) < 2: | ||
18 | print("Please specify a directory to run the conversion script against.") | ||
19 | sys.exit(1) | ||
20 | |||
21 | license_map = { | ||
22 | "AGPL-3" : "AGPL-3.0-only", | ||
23 | "AGPL-3+" : "AGPL-3.0-or-later", | ||
24 | "AGPLv3" : "AGPL-3.0-only", | ||
25 | "AGPLv3+" : "AGPL-3.0-or-later", | ||
26 | "AGPLv3.0" : "AGPL-3.0-only", | ||
27 | "AGPLv3.0+" : "AGPL-3.0-or-later", | ||
28 | "AGPL-3.0" : "AGPL-3.0-only", | ||
29 | "AGPL-3.0+" : "AGPL-3.0-or-later", | ||
30 | "BSD-0-Clause" : "0BSD", | ||
31 | "GPL-1" : "GPL-1.0-only", | ||
32 | "GPL-1+" : "GPL-1.0-or-later", | ||
33 | "GPLv1" : "GPL-1.0-only", | ||
34 | "GPLv1+" : "GPL-1.0-or-later", | ||
35 | "GPLv1.0" : "GPL-1.0-only", | ||
36 | "GPLv1.0+" : "GPL-1.0-or-later", | ||
37 | "GPL-1.0" : "GPL-1.0-only", | ||
38 | "GPL-1.0+" : "GPL-1.0-or-later", | ||
39 | "GPL-2" : "GPL-2.0-only", | ||
40 | "GPL-2+" : "GPL-2.0-or-later", | ||
41 | "GPLv2" : "GPL-2.0-only", | ||
42 | "GPLv2+" : "GPL-2.0-or-later", | ||
43 | "GPLv2.0" : "GPL-2.0-only", | ||
44 | "GPLv2.0+" : "GPL-2.0-or-later", | ||
45 | "GPL-2.0" : "GPL-2.0-only", | ||
46 | "GPL-2.0+" : "GPL-2.0-or-later", | ||
47 | "GPL-3" : "GPL-3.0-only", | ||
48 | "GPL-3+" : "GPL-3.0-or-later", | ||
49 | "GPLv3" : "GPL-3.0-only", | ||
50 | "GPLv3+" : "GPL-3.0-or-later", | ||
51 | "GPLv3.0" : "GPL-3.0-only", | ||
52 | "GPLv3.0+" : "GPL-3.0-or-later", | ||
53 | "GPL-3.0" : "GPL-3.0-only", | ||
54 | "GPL-3.0+" : "GPL-3.0-or-later", | ||
55 | "LGPLv2" : "LGPL-2.0-only", | ||
56 | "LGPLv2+" : "LGPL-2.0-or-later", | ||
57 | "LGPLv2.0" : "LGPL-2.0-only", | ||
58 | "LGPLv2.0+" : "LGPL-2.0-or-later", | ||
59 | "LGPL-2.0" : "LGPL-2.0-only", | ||
60 | "LGPL-2.0+" : "LGPL-2.0-or-later", | ||
61 | "LGPL2.1" : "LGPL-2.1-only", | ||
62 | "LGPL2.1+" : "LGPL-2.1-or-later", | ||
63 | "LGPLv2.1" : "LGPL-2.1-only", | ||
64 | "LGPLv2.1+" : "LGPL-2.1-or-later", | ||
65 | "LGPL-2.1" : "LGPL-2.1-only", | ||
66 | "LGPL-2.1+" : "LGPL-2.1-or-later", | ||
67 | "LGPLv3" : "LGPL-3.0-only", | ||
68 | "LGPLv3+" : "LGPL-3.0-or-later", | ||
69 | "LGPL-3.0" : "LGPL-3.0-only", | ||
70 | "LGPL-3.0+" : "LGPL-3.0-or-later", | ||
71 | "MPL-1" : "MPL-1.0", | ||
72 | "MPLv1" : "MPL-1.0", | ||
73 | "MPLv1.1" : "MPL-1.1", | ||
74 | "MPLv2" : "MPL-2.0", | ||
75 | "MIT-X" : "MIT", | ||
76 | "MIT-style" : "MIT", | ||
77 | "openssl" : "OpenSSL", | ||
78 | "PSF" : "PSF-2.0", | ||
79 | "PSFv2" : "PSF-2.0", | ||
80 | "Python-2" : "Python-2.0", | ||
81 | "Apachev2" : "Apache-2.0", | ||
82 | "Apache-2" : "Apache-2.0", | ||
83 | "Artisticv1" : "Artistic-1.0", | ||
84 | "Artistic-1" : "Artistic-1.0", | ||
85 | "AFL-2" : "AFL-2.0", | ||
86 | "AFL-1" : "AFL-1.2", | ||
87 | "AFLv2" : "AFL-2.0", | ||
88 | "AFLv1" : "AFL-1.2", | ||
89 | "CDDLv1" : "CDDL-1.0", | ||
90 | "CDDL-1" : "CDDL-1.0", | ||
91 | "EPLv1.0" : "EPL-1.0", | ||
92 | "FreeType" : "FTL", | ||
93 | "Nauman" : "Naumen", | ||
94 | "tcl" : "TCL", | ||
95 | "vim" : "Vim", | ||
96 | "SGIv1" : "SGI-OpenGL", | ||
97 | } | ||
98 | |||
99 | def processfile(fn): | ||
100 | print("processing file '%s'" % fn) | ||
101 | try: | ||
102 | fh, abs_path = tempfile.mkstemp() | ||
103 | modified = False | ||
104 | with os.fdopen(fh, 'w') as new_file: | ||
105 | with open(fn, "r") as old_file: | ||
106 | for line in old_file: | ||
107 | if not line.startswith("LICENSE"): | ||
108 | new_file.write(line) | ||
109 | continue | ||
110 | orig = line | ||
111 | for license in sorted(license_map, key=len, reverse=True): | ||
112 | for ending in ['"', "'", " ", ")"]: | ||
113 | line = line.replace(license + ending, license_map[license] + ending) | ||
114 | if orig != line: | ||
115 | modified = True | ||
116 | new_file.write(line) | ||
117 | new_file.close() | ||
118 | if modified: | ||
119 | shutil.copymode(fn, abs_path) | ||
120 | os.remove(fn) | ||
121 | shutil.move(abs_path, fn) | ||
122 | except UnicodeDecodeError: | ||
123 | pass | ||
124 | |||
125 | ourname = os.path.basename(sys.argv[0]) | ||
126 | ourversion = "0.01" | ||
127 | |||
128 | if os.path.isfile(sys.argv[1]): | ||
129 | processfile(sys.argv[1]) | ||
130 | sys.exit(0) | ||
131 | |||
132 | for targetdir in sys.argv[1:]: | ||
133 | print("processing directory '%s'" % targetdir) | ||
134 | for root, dirs, files in os.walk(targetdir): | ||
135 | for name in files: | ||
136 | if name == ourname: | ||
137 | continue | ||
138 | fn = os.path.join(root, name) | ||
139 | if os.path.islink(fn): | ||
140 | continue | ||
141 | if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"): | ||
142 | continue | ||
143 | processfile(fn) | ||
144 | |||
145 | print("All files processed with version %s" % ourversion) | ||
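The script rewrites only lines starting with LICENSE, and replaces longer identifiers first so that, for example, AGPL-3.0+ is not partially rewritten by the shorter GPL-3.0+ rule. A hypothetical before/after for one recipe:

    scripts/contrib/convert-spdx-licenses.py meta-mylayer
    # before: LICENSE = "GPLv2+ & LGPL-2.1"
    # after:  LICENSE = "GPL-2.0-or-later & LGPL-2.1-only"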
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py new file mode 100755 index 0000000000..eded90ca61 --- /dev/null +++ b/scripts/contrib/convert-variable-renames.py | |||
@@ -0,0 +1,116 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Conversion script to rename variables to versions with improved terminology. | ||
4 | # Also highlights potentially problematic language and removed variables. | ||
5 | # | ||
6 | # Copyright (C) 2021 Richard Purdie | ||
7 | # Copyright (C) 2022 Wind River Systems, Inc. | ||
8 | # | ||
9 | # SPDX-License-Identifier: GPL-2.0-only | ||
10 | # | ||
11 | |||
12 | import re | ||
13 | import os | ||
14 | import sys | ||
15 | import tempfile | ||
16 | import shutil | ||
17 | import mimetypes | ||
18 | |||
19 | if len(sys.argv) < 2: | ||
20 | print("Please specify a directory to run the conversion script against.") | ||
21 | sys.exit(1) | ||
22 | |||
23 | renames = { | ||
24 | "BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH", | ||
25 | "BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS", | ||
26 | "BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS", | ||
27 | "BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS", | ||
28 | "BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS", | ||
29 | "BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS", | ||
30 | "CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE", | ||
31 | "CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE", | ||
32 | "MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED", | ||
33 | "PNBLACKLIST" : "SKIP_RECIPE", | ||
34 | "SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE", | ||
35 | "SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW", | ||
36 | "SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE", | ||
37 | "SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES", | ||
38 | "SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE", | ||
39 | "UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE", | ||
40 | "ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE", | ||
41 | "ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE", | ||
42 | "ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE", | ||
43 | "ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE", | ||
44 | "ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE", | ||
45 | "LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED", | ||
46 | } | ||
47 | |||
48 | removed_list = [ | ||
49 | "BB_STAMP_WHITELIST", | ||
50 | "BB_STAMP_POLICY", | ||
51 | "INHERIT_BLACKLIST", | ||
52 | "TUNEABI_WHITELIST", | ||
53 | ] | ||
54 | |||
55 | context_check_list = [ | ||
56 | "blacklist", | ||
57 | "whitelist", | ||
58 | "abort", | ||
59 | ] | ||
60 | |||
61 | def processfile(fn): | ||
62 | |||
63 | print("processing file '%s'" % fn) | ||
64 | try: | ||
65 | fh, abs_path = tempfile.mkstemp() | ||
66 | modified = False | ||
67 | with os.fdopen(fh, 'w') as new_file: | ||
68 | with open(fn, "r") as old_file: | ||
69 | lineno = 0 | ||
70 | for line in old_file: | ||
71 | lineno += 1 | ||
72 | if not line or "BB_RENAMED_VARIABLE" in line: | ||
73 | continue | ||
74 | # Do the renames | ||
75 | for old_name, new_name in renames.items(): | ||
76 | if old_name in line: | ||
77 | line = line.replace(old_name, new_name) | ||
78 | modified = True | ||
79 | # Find removed names | ||
80 | for removed_name in removed_list: | ||
81 | if removed_name in line: | ||
82 | print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name)) | ||
83 | for check_word in context_check_list: | ||
84 | if re.search(check_word, line, re.IGNORECASE): | ||
85 | print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word)) | ||
86 | new_file.write(line) | ||
87 | new_file.close() | ||
88 | if modified: | ||
89 | print("*** Modified file '%s'" % (fn)) | ||
90 | shutil.copymode(fn, abs_path) | ||
91 | os.remove(fn) | ||
92 | shutil.move(abs_path, fn) | ||
93 | except UnicodeDecodeError: | ||
94 | pass | ||
95 | |||
96 | ourname = os.path.basename(sys.argv[0]) | ||
97 | ourversion = "0.1" | ||
98 | |||
99 | if os.path.isfile(sys.argv[1]): | ||
100 | processfile(sys.argv[1]) | ||
101 | sys.exit(0) | ||
102 | |||
103 | for targetdir in sys.argv[1:]: | ||
104 | print("processing directory '%s'" % targetdir) | ||
105 | for root, dirs, files in os.walk(targetdir): | ||
106 | for name in files: | ||
107 | if name == ourname: | ||
108 | continue | ||
109 | fn = os.path.join(root, name) | ||
110 | if os.path.islink(fn): | ||
111 | continue | ||
112 | if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"): | ||
113 | continue | ||
114 | processfile(fn) | ||
115 | |||
116 | print("All files processed with version %s" % ourversion) | ||
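Renames are applied in place, while removed variables and flagged words only produce warnings for manual follow-up. A hypothetical run (file name and line number invented; the messages mirror the prints above):

    $ scripts/contrib/convert-variable-renames.py meta-mylayer
    processing directory 'meta-mylayer'
    processing file 'meta-mylayer/conf/layer.conf'
    meta-mylayer/conf/layer.conf needs further work at line 12 since it contains whitelist
    *** Modified file 'meta-mylayer/conf/layer.conf'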
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage index 7f2ad112a6..70eee8ebea 100755 --- a/scripts/contrib/ddimage +++ b/scripts/contrib/ddimage | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/bin/sh | 1 | #!/bin/sh |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | 7 | ||
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control index ad6070c369..82c84baa1d 100755 --- a/scripts/contrib/dialog-power-control +++ b/scripts/contrib/dialog-power-control | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/bin/sh | 1 | #!/bin/sh |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | # Simple script to show a manual power prompt for when you want to use | 7 | # Simple script to show a manual power prompt for when you want to use |
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh index f436f9bae0..7197a2fcea 100755 --- a/scripts/contrib/documentation-audit.sh +++ b/scripts/contrib/documentation-audit.sh | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/bin/bash | 1 | #!/bin/bash |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | # Perform an audit of which packages provide documentation and which | 7 | # Perform an audit of which packages provide documentation and which |
@@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then | |||
26 | fi | 28 | fi |
27 | 29 | ||
28 | echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" | 30 | echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" |
29 | echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " | 31 | echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or " |
30 | echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\"" | 32 | echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\"" |
31 | 33 | ||
32 | for pkg in `bitbake -s | awk '{ print \$1 }'`; do | 34 | for pkg in `bitbake -s | awk '{ print \$1 }'`; do |
33 | if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || | 35 | if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || |
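The updated reminder reflects the LICENSE_FLAGS_WHITELIST to LICENSE_FLAGS_ACCEPTED rename; in local.conf that looks like this (the value is the example from the message above):

    LICENSE_FLAGS_ACCEPTED = "commercial"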
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest index 3c07a73a4e..4d65a99258 100755 --- a/scripts/contrib/image-manifest +++ b/scripts/contrib/image-manifest | |||
@@ -392,7 +392,7 @@ def export_manifest_info(args): | |||
392 | for key in rd.getVarFlags('PACKAGECONFIG').keys(): | 392 | for key in rd.getVarFlags('PACKAGECONFIG').keys(): |
393 | if key == 'doc': | 393 | if key == 'doc': |
394 | continue | 394 | continue |
395 | rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True) | 395 | rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key) |
396 | 396 | ||
397 | if config['patches'] == 'yes': | 397 | if config['patches'] == 'yes': |
398 | patches = oe.recipeutils.get_recipe_patches(rd) | 398 | patches = oe.recipeutils.get_recipe_patches(rd) |
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py new file mode 100755 index 0000000000..829cc4cd30 --- /dev/null +++ b/scripts/contrib/improve_kernel_cve_report.py | |||
@@ -0,0 +1,467 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # The script uses another source of CVE information from linux-vulns | ||
6 | # to enrich the cve-summary from cve-check or vex. | ||
7 | # It can also use the list of compiled files from the kernel SPDX to ignore CVEs | ||
8 | # that do not apply because the affected files are not compiled. | ||
9 | # | ||
10 | # It creates a new json file with updated CVE information | ||
11 | # | ||
12 | # Compiled files can be extracted adding the following in local.conf | ||
13 | # SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1" | ||
14 | # | ||
15 | # Tested with the following CVE sources: | ||
16 | # - https://git.kernel.org/pub/scm/linux/security/vulns.git | ||
17 | # - https://github.com/CVEProject/cvelistV5 | ||
18 | # | ||
19 | # Example: | ||
20 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns | ||
21 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json | ||
22 | # | ||
23 | # SPDX-License-Identifier: GPLv2 | ||
24 | |||
25 | import argparse | ||
26 | import json | ||
27 | import sys | ||
28 | import logging | ||
29 | import glob | ||
30 | import os | ||
31 | import pathlib | ||
32 | from packaging.version import Version | ||
33 | |||
34 | def is_linux_cve(cve_info): | ||
35 | '''Return true if the CVE belongs to Linux''' | ||
36 | if not "affected" in cve_info["containers"]["cna"]: | ||
37 | return False | ||
38 | for affected in cve_info["containers"]["cna"]["affected"]: | ||
39 | if not "product" in affected: | ||
40 | return False | ||
41 | if affected["product"] == "Linux" and affected["vendor"] == "Linux": | ||
42 | return True | ||
43 | return False | ||
44 | |||
45 | def get_kernel_cves(datadir, compiled_files, version): | ||
46 | """ | ||
47 | Get CVEs for the kernel | ||
48 | """ | ||
49 | cves = {} | ||
50 | |||
51 | check_config = len(compiled_files) > 0 | ||
52 | |||
53 | base_version = Version(f"{version.major}.{version.minor}") | ||
54 | |||
55 | # Check all CVES from kernel vulns | ||
56 | pattern = os.path.join(datadir, '**', "CVE-*.json") | ||
57 | cve_files = glob.glob(pattern, recursive=True) | ||
58 | not_applicable_config = 0 | ||
59 | fixed_as_later_backport = 0 | ||
60 | vulnerable = 0 | ||
61 | not_vulnerable = 0 | ||
62 | for cve_file in sorted(cve_files): | ||
63 | cve_info = {} | ||
64 | with open(cve_file, "r", encoding='ISO-8859-1') as f: | ||
65 | cve_info = json.load(f) | ||
66 | |||
67 | if len(cve_info) == 0: | ||
68 | logging.error("No valid data in %s. Aborting", cve_file) | ||
69 | break | ||
70 | |||
71 | if not is_linux_cve(cve_info): | ||
72 | continue | ||
73 | cve_id = os.path.basename(cve_file)[:-5] | ||
74 | description = cve_info["containers"]["cna"]["descriptions"][0]["value"] | ||
75 | if cve_file.find("rejected") >= 0: | ||
76 | logging.debug("%s is rejected by the CNA", cve_id) | ||
77 | cves[cve_id] = { | ||
78 | "id": cve_id, | ||
79 | "status": "Ignored", | ||
80 | "detail": "rejected", | ||
81 | "summary": description, | ||
82 | "description": f"Rejected by CNA" | ||
83 | } | ||
84 | continue | ||
85 | if any(elem in cve_file for elem in ["review", "reserved", "testing"]): | ||
86 | continue | ||
87 | |||
88 | is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version) | ||
89 | |||
90 | logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected) | ||
91 | |||
92 | if is_vulnerable is None: | ||
93 | logging.warning("%s doesn't have good metadata", cve_id) | ||
94 | if is_vulnerable: | ||
95 | is_affected = True | ||
96 | affected_files = [] | ||
97 | if check_config: | ||
98 | is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info) | ||
99 | |||
100 | if not is_affected and len(affected_files) > 0: | ||
101 | logging.debug( | ||
102 | "%s - not applicable configuration since affected files not compiled: %s", | ||
103 | cve_id, affected_files) | ||
104 | cves[cve_id] = { | ||
105 | "id": cve_id, | ||
106 | "status": "Ignored", | ||
107 | "detail": "not-applicable-config", | ||
108 | "summary": description, | ||
109 | "description": f"Source code not compiled by config. {affected_files}" | ||
110 | } | ||
111 | not_applicable_config +=1 | ||
112 | # Check if we have backport | ||
113 | else: | ||
114 | if not better_match_last: | ||
115 | fixed_in = last_affected | ||
116 | else: | ||
117 | fixed_in = better_match_last | ||
118 | logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in) | ||
119 | cves[cve_id] = { | ||
120 | "id": cve_id, | ||
121 | "status": "Unpatched", | ||
122 | "detail": "version-in-range", | ||
123 | "summary": description, | ||
124 | "description": f"Needs backporting (fixed from {fixed_in})" | ||
125 | } | ||
126 | vulnerable += 1 | ||
127 | if (better_match_last and | ||
128 | Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version): | ||
129 | fixed_as_later_backport += 1 | ||
130 | # Not vulnerable | ||
131 | else: | ||
132 | if not first_affected: | ||
133 | logging.debug("%s - not known affected %s", | ||
134 | cve_id, | ||
135 | better_match_last) | ||
136 | cves[cve_id] = { | ||
137 | "id": cve_id, | ||
138 | "status": "Patched", | ||
139 | "detail": "version-not-in-range", | ||
140 | "summary": description, | ||
141 | "description": "No CPE match" | ||
142 | } | ||
143 | not_vulnerable += 1 | ||
144 | continue | ||
145 | backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}") | ||
146 | if version < first_affected: | ||
147 | logging.debug('%s - fixed-version: only affects %s onwards', | ||
148 | cve_id, | ||
149 | first_affected) | ||
150 | cves[cve_id] = { | ||
151 | "id": cve_id, | ||
152 | "status": "Patched", | ||
153 | "detail": "fixed-version", | ||
154 | "summary": description, | ||
155 | "description": f"only affects {first_affected} onwards" | ||
156 | } | ||
157 | not_vulnerable += 1 | ||
158 | elif last_affected <= version: | ||
159 | logging.debug("%s - fixed-version: Fixed from version %s", | ||
160 | cve_id, | ||
161 | last_affected) | ||
162 | cves[cve_id] = { | ||
163 | "id": cve_id, | ||
164 | "status": "Patched", | ||
165 | "detail": "fixed-version", | ||
166 | "summary": description, | ||
167 | "description": f"fixed-version: Fixed from version {last_affected}" | ||
168 | } | ||
169 | not_vulnerable += 1 | ||
170 | elif backport_base == base_version: | ||
171 | logging.debug("%s - cpe-stable-backport: Backported in %s", | ||
172 | cve_id, | ||
173 | better_match_last) | ||
174 | cves[cve_id] = { | ||
175 | "id": cve_id, | ||
176 | "status": "Patched", | ||
177 | "detail": "cpe-stable-backport", | ||
178 | "summary": description, | ||
179 | "description": f"Backported in {better_match_last}" | ||
180 | } | ||
181 | not_vulnerable += 1 | ||
182 | else: | ||
183 | logging.debug("%s - version not affected %s", cve_id, str(affected_versions)) | ||
184 | cves[cve_id] = { | ||
185 | "id": cve_id, | ||
186 | "status": "Patched", | ||
187 | "detail": "version-not-in-range", | ||
188 | "summary": description, | ||
189 | "description": f"Range {affected_versions}" | ||
190 | } | ||
191 | not_vulnerable += 1 | ||
192 | |||
193 | logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config) | ||
194 | logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable) | ||
195 | logging.info("Total vulnerable CVEs: %d", vulnerable) | ||
196 | |||
197 | logging.info("Total CVEs already backported in %s: %s", base_version, | ||
198 | fixed_as_later_backport) | ||
199 | return cves | ||
200 | |||
201 | def read_spdx(spdx_file): | ||
202 | '''Open SPDX file and extract compiled files''' | ||
203 | with open(spdx_file, 'r', encoding='ISO-8859-1') as f: | ||
204 | spdx = json.load(f) | ||
205 | if "spdxVersion" in spdx: | ||
206 | if spdx["spdxVersion"] == "SPDX-2.2": | ||
207 | return read_spdx2(spdx) | ||
208 | if "@graph" in spdx: | ||
209 | return read_spdx3(spdx) | ||
210 | return [] | ||
211 | |||
212 | def read_spdx2(spdx): | ||
213 | ''' | ||
214 | Read spdx2 compiled files from spdx | ||
215 | ''' | ||
216 | cfiles = set() | ||
217 | if 'files' not in spdx: | ||
218 | return cfiles | ||
219 | for item in spdx['files']: | ||
220 | for ftype in item['fileTypes']: | ||
221 | if ftype == "SOURCE": | ||
222 | filename = item["fileName"][item["fileName"].find("/")+1:] | ||
223 | cfiles.add(filename) | ||
224 | return cfiles | ||
225 | |||
226 | def read_spdx3(spdx): | ||
227 | ''' | ||
228 | Read spdx3 compiled files from spdx | ||
229 | ''' | ||
230 | cfiles = set() | ||
231 | for item in spdx["@graph"]: | ||
232 | if "software_primaryPurpose" not in item: | ||
233 | continue | ||
234 | if item["software_primaryPurpose"] == "source": | ||
235 | filename = item['name'][item['name'].find("/")+1:] | ||
236 | cfiles.add(filename) | ||
237 | return cfiles | ||
238 | |||
239 | def check_kernel_compiled_files(compiled_files, cve_info): | ||
240 | """ | ||
241 | Return if a CVE affected us depending on compiled files | ||
242 | """ | ||
243 | files_affected = set() | ||
244 | is_affected = False | ||
245 | |||
246 | for item in cve_info['containers']['cna']['affected']: | ||
247 | if "programFiles" in item: | ||
248 | for f in item['programFiles']: | ||
249 | if f not in files_affected: | ||
250 | files_affected.add(f) | ||
251 | |||
252 | if len(files_affected) > 0: | ||
253 | for f in files_affected: | ||
254 | if f in compiled_files: | ||
255 | logging.debug("File match: %s", f) | ||
256 | is_affected = True | ||
257 | return is_affected, files_affected | ||
258 | |||
259 | def get_cpe_applicability(cve_info, v): | ||
260 | ''' | ||
261 | Check if version is affected and return affected versions | ||
262 | ''' | ||
263 | base_branch = Version(f"{v.major}.{v.minor}") | ||
264 | affected = [] | ||
265 | if not 'cpeApplicability' in cve_info["containers"]["cna"]: | ||
266 | return None, None, None, None, None, None | ||
267 | |||
268 | for nodes in cve_info["containers"]["cna"]["cpeApplicability"]: | ||
269 | for node in nodes.values(): | ||
270 | vulnerable = False | ||
271 | matched_branch = False | ||
272 | first_affected = Version("5000") | ||
273 | last_affected = Version("0") | ||
274 | better_match_first = Version("0") | ||
275 | better_match_last = Version("5000") | ||
276 | |||
277 | if len(node[0]['cpeMatch']) == 0: | ||
278 | first_affected = None | ||
279 | last_affected = None | ||
280 | better_match_first = None | ||
281 | better_match_last = None | ||
282 | |||
283 | for cpe_match in node[0]['cpeMatch']: | ||
284 | version_start_including = Version("0") | ||
285 | version_end_excluding = Version("0") | ||
286 | if 'versionStartIncluding' in cpe_match: | ||
287 | version_start_including = Version(cpe_match['versionStartIncluding']) | ||
288 | else: | ||
289 | version_start_including = Version("0") | ||
290 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
291 | if "versionEndExcluding" in cpe_match: | ||
292 | version_end_excluding = Version(cpe_match["versionEndExcluding"]) | ||
293 | else: | ||
294 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
295 | version_end_excluding = Version( | ||
296 | f"{version_start_including.major}.{version_start_including.minor}.5000" | ||
297 | ) | ||
298 | affected.append(f" {version_start_including}-{version_end_excluding}") | ||
299 | # Detect if versionEnd is in fixed in base branch. It has precedence over the rest | ||
300 | branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}") | ||
301 | if branch_end == base_branch: | ||
302 | if version_start_including <= v < version_end_excluding: | ||
303 | vulnerable = cpe_match['vulnerable'] | ||
304 | # If we don't match in our branch, we are not vulnerable, | ||
305 | # since we have a backport | ||
306 | matched_branch = True | ||
307 | better_match_first = version_start_including | ||
308 | better_match_last = version_end_excluding | ||
309 | if version_start_including <= v < version_end_excluding and not matched_branch: | ||
310 | if version_end_excluding < better_match_last: | ||
311 | better_match_first = max(version_start_including, better_match_first) | ||
312 | better_match_last = min(better_match_last, version_end_excluding) | ||
313 | vulnerable = cpe_match['vulnerable'] | ||
314 | matched_branch = True | ||
315 | |||
316 | first_affected = min(version_start_including, first_affected) | ||
317 | last_affected = max(version_end_excluding, last_affected) | ||
318 | # Not a better match, we use the first and last affected instead of the fake .5000 | ||
319 | if vulnerable and better_match_last == Version(f"{base_branch}.5000"): | ||
320 | better_match_last = last_affected | ||
321 | better_match_first = first_affected | ||
322 | return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected | ||
323 | |||
324 | def copy_data(old, new): | ||
325 | '''Update dictionary with new entries, while keeping the old ones''' | ||
326 | for k in new.keys(): | ||
327 | old[k] = new[k] | ||
328 | return old | ||
329 | |||
330 | # Function taken from cve_check.bbclass. Adapted to cve fields | ||
331 | def cve_update(cve_data, cve, entry): | ||
332 | # If no entry, just add it | ||
333 | if cve not in cve_data: | ||
334 | cve_data[cve] = entry | ||
335 | return | ||
336 | # If we are updating, there might be change in the status | ||
337 | if cve_data[cve]['status'] == "Unknown": | ||
338 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
339 | return | ||
340 | if cve_data[cve]['status'] == entry['status']: | ||
341 | return | ||
342 | if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched": | ||
343 | logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve) | ||
344 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
345 | return | ||
346 | if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched": | ||
347 | logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve) | ||
348 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
349 | return | ||
350 | # If we have an "Ignored", it has a priority | ||
351 | if cve_data[cve]['status'] == "Ignored": | ||
352 | logging.debug("CVE %s not updating because Ignored", cve) | ||
353 | return | ||
354 | # An incoming "Ignored" also takes priority | ||
355 | if entry['status'] == "Ignored": | ||
356 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
357 | logging.debug("CVE entry %s updated from Unpatched to Ignored", cve) | ||
358 | return | ||
359 | logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s", | ||
360 | cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail']) | ||
361 | |||
362 | def main(): | ||
363 | parser = argparse.ArgumentParser( | ||
364 | description="Update cve-summary with kernel compiled files and kernel CVE information" | ||
365 | ) | ||
366 | parser.add_argument( | ||
367 | "-s", | ||
368 | "--spdx", | ||
369 | help="SPDX2/3 for the kernel. Needs to include compiled sources", | ||
370 | ) | ||
371 | parser.add_argument( | ||
372 | "--datadir", | ||
373 | type=pathlib.Path, | ||
374 | help="Directory where CVE data is", | ||
375 | required=True | ||
376 | ) | ||
377 | parser.add_argument( | ||
378 | "--old-cve-report", | ||
379 | help="CVE report to update. (Optional)", | ||
380 | ) | ||
381 | parser.add_argument( | ||
382 | "--kernel-version", | ||
383 | help="Kernel version. Needed if old cve_report is not provided (Optional)", | ||
384 | type=Version | ||
385 | ) | ||
386 | parser.add_argument( | ||
387 | "--new-cve-report", | ||
388 | help="Output file", | ||
389 | default="cve-summary-enhance.json" | ||
390 | ) | ||
391 | parser.add_argument( | ||
392 | "-D", | ||
393 | "--debug", | ||
394 | help='Enable debug ', | ||
395 | action="store_true") | ||
396 | |||
397 | args = parser.parse_args() | ||
398 | |||
399 | if args.debug: | ||
400 | log_level=logging.DEBUG | ||
401 | else: | ||
402 | log_level=logging.INFO | ||
403 | logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level) | ||
404 | |||
405 | if not args.kernel_version and not args.old_cve_report: | ||
406 | parser.error("either --kernel-version or --old-cve-report is needed") | ||
407 | return -1 | ||
408 | |||
409 | # by default we don't check the compiled files, unless provided | ||
410 | compiled_files = [] | ||
411 | if args.spdx: | ||
412 | compiled_files = read_spdx(args.spdx) | ||
413 | logging.info("Total compiled files %d", len(compiled_files)) | ||
414 | |||
415 | if args.old_cve_report: | ||
416 | with open(args.old_cve_report, encoding='ISO-8859-1') as f: | ||
417 | cve_report = json.load(f) | ||
418 | else: | ||
419 | #If summary not provided, we create one | ||
420 | cve_report = { | ||
421 | "version": "1", | ||
422 | "package": [ | ||
423 | { | ||
424 | "name": "linux-yocto", | ||
425 | "version": str(args.kernel_version), | ||
426 | "products": [ | ||
427 | { | ||
428 | "product": "linux_kernel", | ||
429 | "cvesInRecord": "Yes" | ||
430 | } | ||
431 | ], | ||
432 | "issue": [] | ||
433 | } | ||
434 | ] | ||
435 | } | ||
436 | |||
437 | for pkg in cve_report['package']: | ||
438 | is_kernel = False | ||
439 | for product in pkg['products']: | ||
440 | if product['product'] == "linux_kernel": | ||
441 | is_kernel=True | ||
442 | if not is_kernel: | ||
443 | continue | ||
444 | |||
445 | kernel_cves = get_kernel_cves(args.datadir, | ||
446 | compiled_files, | ||
447 | Version(pkg["version"])) | ||
448 | logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves)) | ||
449 | cves = {issue["id"]: issue for issue in pkg["issue"]} | ||
450 | logging.info("Total kernel before processing cves: %s", len(cves)) | ||
451 | |||
452 | for cve in kernel_cves: | ||
453 | cve_update(cves, cve, kernel_cves[cve]) | ||
454 | |||
455 | pkg["issue"] = [] | ||
456 | for cve in sorted(cves): | ||
457 | pkg["issue"].extend([cves[cve]]) | ||
458 | logging.info("Total kernel cves after processing: %s", len(pkg['issue'])) | ||
459 | |||
460 | with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f: | ||
461 | json.dump(cve_report, f, indent=2) | ||
462 | |||
463 | return 0 | ||
464 | |||
465 | if __name__ == "__main__": | ||
466 | sys.exit(main()) | ||
467 | |||
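Typical usage follows the examples in the header comment: enrich an existing cve-summary.json with the kernel CNA data, optionally narrowing by the compiled-file list from the kernel SPDX. Paths here are hypothetical:

    python3 scripts/contrib/improve_kernel_cve_report.py \
        --datadir ./vulns \
        --old-cve-report build/tmp/log/cve/cve-summary.json \
        --new-cve-report cve-summary-enhance.json
    # each kernel "issue" entry in the output carries id, status, detail,
    # summary and description, e.g. status "Patched", detail "cpe-stable-backport"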
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh new file mode 100755 index 0000000000..31caaf339d --- /dev/null +++ b/scripts/contrib/make-spdx-bindings.sh | |||
@@ -0,0 +1,12 @@ | |||
1 | #! /bin/sh | ||
2 | # | ||
3 | # SPDX-License-Identifier: MIT | ||
4 | |||
5 | THIS_DIR="$(dirname "$0")" | ||
6 | |||
7 | VERSION="3.0.1" | ||
8 | |||
9 | shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \ | ||
10 | --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \ | ||
11 | --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \ | ||
12 | python -o $THIS_DIR/../../meta/lib/oe/spdx30.py | ||
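The script assumes shacl2code is available and regenerates the SPDX 3.0.1 bindings in place; a sketch, assuming pip and network access:

    pip install shacl2code
    scripts/contrib/make-spdx-bindings.sh    # rewrites meta/lib/oe/spdx30.py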
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py index de3862c897..7192113c28 100755 --- a/scripts/contrib/oe-build-perf-report-email.py +++ b/scripts/contrib/oe-build-perf-report-email.py | |||
@@ -19,8 +19,6 @@ import socket | |||
19 | import subprocess | 19 | import subprocess |
20 | import sys | 20 | import sys |
21 | import tempfile | 21 | import tempfile |
22 | from email.mime.image import MIMEImage | ||
23 | from email.mime.multipart import MIMEMultipart | ||
24 | from email.mime.text import MIMEText | 22 | from email.mime.text import MIMEText |
25 | 23 | ||
26 | 24 | ||
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") | |||
29 | log = logging.getLogger('oe-build-perf-report') | 27 | log = logging.getLogger('oe-build-perf-report') |
30 | 28 | ||
31 | 29 | ||
32 | # Find js scaper script | ||
33 | SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf', | ||
34 | 'scrape-html-report.js') | ||
35 | if not os.path.isfile(SCRAPE_JS): | ||
36 | log.error("Unable to find oe-build-perf-report-scrape.js") | ||
37 | sys.exit(1) | ||
38 | |||
39 | |||
40 | class ReportError(Exception): | ||
41 | """Local errors""" | ||
42 | pass | ||
43 | |||
44 | |||
45 | def check_utils(): | ||
46 | """Check that all needed utils are installed in the system""" | ||
47 | missing = [] | ||
48 | for cmd in ('phantomjs', 'optipng'): | ||
49 | if not shutil.which(cmd): | ||
50 | missing.append(cmd) | ||
51 | if missing: | ||
52 | log.error("The following tools are missing: %s", ' '.join(missing)) | ||
53 | sys.exit(1) | ||
54 | |||
55 | |||
56 | def parse_args(argv): | 30 | def parse_args(argv): |
57 | """Parse command line arguments""" | 31 | """Parse command line arguments""" |
58 | description = """Email build perf test report""" | 32 | description = """Email build perf test report""" |
@@ -77,137 +51,19 @@ def parse_args(argv): | |||
77 | "the email parts") | 51 | "the email parts") |
78 | parser.add_argument('--text', | 52 | parser.add_argument('--text', |
79 | help="Plain text message") | 53 | help="Plain text message") |
80 | parser.add_argument('--html', | ||
81 | help="HTML report generated by oe-build-perf-report") | ||
82 | parser.add_argument('--phantomjs-args', action='append', | ||
83 | help="Extra command line arguments passed to PhantomJS") | ||
84 | 54 | ||
85 | args = parser.parse_args(argv) | 55 | args = parser.parse_args(argv) |
86 | 56 | ||
87 | if not args.html and not args.text: | 57 | if not args.text: |
88 | parser.error("Please specify --html and/or --text") | 58 | parser.error("Please specify --text") |
89 | 59 | ||
90 | return args | 60 | return args |
91 | 61 | ||
92 | 62 | ||
93 | def decode_png(infile, outfile): | 63 | def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]): |
94 | """Parse/decode/optimize png data from a html element""" | ||
95 | with open(infile) as f: | ||
96 | raw_data = f.read() | ||
97 | |||
98 | # Grab raw base64 data | ||
99 | b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1) | ||
100 | b64_data = re.sub('">.+$', '', b64_data, 1) | ||
101 | |||
102 | # Replace file with proper decoded png | ||
103 | with open(outfile, 'wb') as f: | ||
104 | f.write(base64.b64decode(b64_data)) | ||
105 | |||
106 | subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT) | ||
107 | |||
108 | |||
109 | def mangle_html_report(infile, outfile, pngs): | ||
110 | """Mangle html file into a email compatible format""" | ||
111 | paste = True | ||
112 | png_dir = os.path.dirname(outfile) | ||
113 | with open(infile) as f_in: | ||
114 | with open(outfile, 'w') as f_out: | ||
115 | for line in f_in.readlines(): | ||
116 | stripped = line.strip() | ||
117 | # Strip out scripts | ||
118 | if stripped == '<!--START-OF-SCRIPTS-->': | ||
119 | paste = False | ||
120 | elif stripped == '<!--END-OF-SCRIPTS-->': | ||
121 | paste = True | ||
122 | elif paste: | ||
123 | if re.match('^.+href="data:image/png;base64', stripped): | ||
124 | # Strip out encoded pngs (as they're huge in size) | ||
125 | continue | ||
126 | elif 'www.gstatic.com' in stripped: | ||
127 | # HACK: drop references to external static pages | ||
128 | continue | ||
129 | |||
130 | # Replace charts with <img> elements | ||
131 | match = re.match('<div id="(?P<id>\w+)"', stripped) | ||
132 | if match and match.group('id') in pngs: | ||
133 | f_out.write('<img src="cid:{}"\n'.format(match.group('id'))) | ||
134 | else: | ||
135 | f_out.write(line) | ||
136 | |||
137 | |||
138 | def scrape_html_report(report, outdir, phantomjs_extra_args=None): | ||
139 | """Scrape html report into a format sendable by email""" | ||
140 | tmpdir = tempfile.mkdtemp(dir='.') | ||
141 | log.debug("Using tmpdir %s for phantomjs output", tmpdir) | ||
142 | |||
143 | if not os.path.isdir(outdir): | ||
144 | os.mkdir(outdir) | ||
145 | if os.path.splitext(report)[1] not in ('.html', '.htm'): | ||
146 | raise ReportError("Invalid file extension for report, needs to be " | ||
147 | "'.html' or '.htm'") | ||
148 | |||
149 | try: | ||
150 | log.info("Scraping HTML report with PhantomJS") | ||
151 | extra_args = phantomjs_extra_args if phantomjs_extra_args else [] | ||
152 | subprocess.check_output(['phantomjs', '--debug=true'] + extra_args + | ||
153 | [SCRAPE_JS, report, tmpdir], | ||
154 | stderr=subprocess.STDOUT) | ||
155 | |||
156 | pngs = [] | ||
157 | images = [] | ||
158 | for fname in os.listdir(tmpdir): | ||
159 | base, ext = os.path.splitext(fname) | ||
160 | if ext == '.png': | ||
161 | log.debug("Decoding %s", fname) | ||
162 | decode_png(os.path.join(tmpdir, fname), | ||
163 | os.path.join(outdir, fname)) | ||
164 | pngs.append(base) | ||
165 | images.append(fname) | ||
166 | elif ext in ('.html', '.htm'): | ||
167 | report_file = fname | ||
168 | else: | ||
169 | log.warning("Unknown file extension: '%s'", ext) | ||
170 | #shutil.move(os.path.join(tmpdir, fname), outdir) | ||
171 | |||
172 | log.debug("Mangling html report file %s", report_file) | ||
173 | mangle_html_report(os.path.join(tmpdir, report_file), | ||
174 | os.path.join(outdir, report_file), pngs) | ||
175 | return (os.path.join(outdir, report_file), | ||
176 | [os.path.join(outdir, i) for i in images]) | ||
177 | finally: | ||
178 | shutil.rmtree(tmpdir) | ||
179 | |||
180 | def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[], | ||
181 | blind_copy=[]): | ||
182 | """Send email""" | ||
183 | # Generate email message | 64 | # Generate email message |
184 | text_msg = html_msg = None | 65 | with open(text_fn) as f: |
185 | if text_fn: | 66 | msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain') |
186 | with open(text_fn) as f: | ||
187 | text_msg = MIMEText("Yocto build performance test report.\n" + | ||
188 | f.read(), 'plain') | ||
189 | if html_fn: | ||
190 | html_msg = msg = MIMEMultipart('related') | ||
191 | with open(html_fn) as f: | ||
192 | html_msg.attach(MIMEText(f.read(), 'html')) | ||
193 | for img_fn in image_fns: | ||
194 | # Expect that content id is same as the filename | ||
195 | cid = os.path.splitext(os.path.basename(img_fn))[0] | ||
196 | with open(img_fn, 'rb') as f: | ||
197 | image_msg = MIMEImage(f.read()) | ||
198 | image_msg['Content-ID'] = '<{}>'.format(cid) | ||
199 | html_msg.attach(image_msg) | ||
200 | |||
201 | if text_msg and html_msg: | ||
202 | msg = MIMEMultipart('alternative') | ||
203 | msg.attach(text_msg) | ||
204 | msg.attach(html_msg) | ||
205 | elif text_msg: | ||
206 | msg = text_msg | ||
207 | elif html_msg: | ||
208 | msg = html_msg | ||
209 | else: | ||
210 | raise ReportError("Neither plain text nor html body specified") | ||
211 | 67 | ||
212 | pw_data = pwd.getpwuid(os.getuid()) | 68 | pw_data = pwd.getpwuid(os.getuid()) |
213 | full_name = pw_data.pw_gecos.split(',')[0] | 69 | full_name = pw_data.pw_gecos.split(',')[0] |
@@ -234,8 +90,6 @@ def main(argv=None): | |||
234 | if args.debug: | 90 | if args.debug: |
235 | log.setLevel(logging.DEBUG) | 91 | log.setLevel(logging.DEBUG) |
236 | 92 | ||
237 | check_utils() | ||
238 | |||
239 | if args.outdir: | 93 | if args.outdir: |
240 | outdir = args.outdir | 94 | outdir = args.outdir |
241 | if not os.path.exists(outdir): | 95 | if not os.path.exists(outdir): |
@@ -245,25 +99,16 @@ def main(argv=None): | |||
245 | 99 | ||
246 | try: | 100 | try: |
247 | log.debug("Storing email parts in %s", outdir) | 101 | log.debug("Storing email parts in %s", outdir) |
248 | html_report = images = None | ||
249 | if args.html: | ||
250 | html_report, images = scrape_html_report(args.html, outdir, | ||
251 | args.phantomjs_args) | ||
252 | |||
253 | if args.to: | 102 | if args.to: |
254 | log.info("Sending email to %s", ', '.join(args.to)) | 103 | log.info("Sending email to %s", ', '.join(args.to)) |
255 | if args.cc: | 104 | if args.cc: |
256 | log.info("Copying to %s", ', '.join(args.cc)) | 105 | log.info("Copying to %s", ', '.join(args.cc)) |
257 | if args.bcc: | 106 | if args.bcc: |
258 | log.info("Blind copying to %s", ', '.join(args.bcc)) | 107 | log.info("Blind copying to %s", ', '.join(args.bcc)) |
259 | send_email(args.text, html_report, images, args.subject, | 108 | send_email(args.text, args.subject, args.to, args.cc, args.bcc) |
260 | args.to, args.cc, args.bcc) | ||
261 | except subprocess.CalledProcessError as err: | 109 | except subprocess.CalledProcessError as err: |
262 | log.error("%s, with output:\n%s", str(err), err.output.decode()) | 110 | log.error("%s, with output:\n%s", str(err), err.output.decode()) |
263 | return 1 | 111 | return 1 |
264 | except ReportError as err: | ||
265 | log.error(err) | ||
266 | return 1 | ||
267 | finally: | 112 | finally: |
268 | if not args.outdir: | 113 | if not args.outdir: |
269 | log.debug("Wiping %s", outdir) | 114 | log.debug("Wiping %s", outdir) |
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore new file mode 100644 index 0000000000..285851c984 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/.gitignore | |||
@@ -0,0 +1,8 @@ | |||
1 | *.spdx.json | ||
2 | *.pyc | ||
3 | *.bak | ||
4 | *.swp | ||
5 | *.swo | ||
6 | *.swn | ||
7 | venv/* | ||
8 | .venv/* | ||
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md new file mode 100644 index 0000000000..44f76eacd8 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/README.md | |||
@@ -0,0 +1,24 @@ | |||
1 | # OE Image Files from SBoM | ||
2 | |||
3 | This is an example Python script that will list the packaged files with their | ||
4 | checksums based on the SPDX 3.0.1 SBoM. | ||
5 | |||
6 | It can be used as a template for other programs to investigate output based on | ||
7 | OE SPDX SBoMs. | ||
8 | |||
9 | ## Installation | ||
10 | |||
11 | This project can be installed using a virtual environment: | ||
12 | ``` | ||
13 | python3 -m venv .venv | ||
14 | . .venv/bin/activate | ||
15 | python3 -m pip install -e '.[dev]' | ||
16 | ``` | ||
17 | |||
18 | ## Usage | ||
19 | |||
20 | After installing, the `oe-image-files` program can be used to show the files, e.g.: | ||
21 | |||
22 | ``` | ||
23 | oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json | ||
24 | ``` | ||
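For reference, the output is one line per packaged file in `name - sha256` form, with an empty digest for files that have no recorded hash (see the `print` call in `main.py` below). A hypothetical excerpt:

```
/etc/hostname - 2f5a...
/usr/bin/busybox - 9c1e...
```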
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml new file mode 100644 index 0000000000..3fab5dd605 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml | |||
@@ -0,0 +1,23 @@ | |||
1 | [project] | ||
2 | name = "oe-image-files" | ||
3 | description = "Displays all packaged files on the root file system" | ||
4 | dynamic = ["version"] | ||
5 | requires-python = ">= 3.8" | ||
6 | readme = "README.md" | ||
7 | |||
8 | dependencies = [ | ||
9 | "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179", | ||
10 | ] | ||
11 | |||
12 | [project.scripts] | ||
13 | oe-image-files = "oe_image_files:main" | ||
14 | |||
15 | [build-system] | ||
16 | requires = ["hatchling"] | ||
17 | build-backend = "hatchling.build" | ||
18 | |||
19 | [tool.hatch.version] | ||
20 | path = "src/oe_image_files/version.py" | ||
21 | |||
22 | [tool.hatch.metadata] | ||
23 | allow-direct-references = true | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py new file mode 100644 index 0000000000..c28a133f2d --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py | |||
@@ -0,0 +1 @@ | |||
from .main import main | |||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py new file mode 100644 index 0000000000..8476bf6369 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py | |||
@@ -0,0 +1,86 @@ | |||
1 | # SPDX-License-Identifier: MIT | ||
2 | |||
3 | import argparse | ||
4 | from pathlib import Path | ||
5 | |||
6 | |||
7 | from spdx_python_model import v3_0_1 as spdx_3_0_1 | ||
8 | from .version import VERSION | ||
9 | |||
10 | |||
11 | def main(): | ||
12 | parser = argparse.ArgumentParser( | ||
13 | description="Show the packaged files and checksums in an OE image from the SPDX SBoM" | ||
14 | ) | ||
15 | parser.add_argument("file", help="SPDX 3 input file", type=Path) | ||
16 | parser.add_argument("--version", "-V", action="version", version=VERSION) | ||
17 | |||
18 | args = parser.parse_args() | ||
19 | |||
20 | # Load SPDX data from file into a new object set | ||
21 | objset = spdx_3_0_1.SHACLObjectSet() | ||
22 | with args.file.open("r") as f: | ||
23 | d = spdx_3_0_1.JSONLDDeserializer() | ||
24 | d.read(f, objset) | ||
25 | |||
26 | # Find the top level SPDX Document object | ||
27 | for o in objset.foreach_type(spdx_3_0_1.SpdxDocument): | ||
28 | doc = o | ||
29 | break | ||
30 | else: | ||
31 | print("ERROR: No SPDX Document found!") | ||
32 | return 1 | ||
33 | |||
34 | # Find the root SBoM in the document | ||
35 | for o in doc.rootElement: | ||
36 | if isinstance(o, spdx_3_0_1.software_Sbom): | ||
37 | sbom = o | ||
38 | break | ||
39 | else: | ||
40 | print("ERROR: SBoM not found in document") | ||
41 | return 1 | ||
42 | |||
43 | # Find the root file system package in the SBoM | ||
44 | for o in sbom.rootElement: | ||
45 | if ( | ||
46 | isinstance(o, spdx_3_0_1.software_Package) | ||
47 | and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive | ||
48 | ): | ||
49 | root_package = o | ||
50 | break | ||
51 | else: | ||
52 | print("ERROR: Package not found in document") | ||
53 | return 1 | ||
54 | |||
55 | # Find all relationships of type "contains" that go FROM the root file | ||
56 | # system | ||
57 | files = [] | ||
58 | for rel in objset.foreach_type(spdx_3_0_1.Relationship): | ||
59 | if not rel.relationshipType == spdx_3_0_1.RelationshipType.contains: | ||
60 | continue | ||
61 | |||
62 | if not rel.from_ is root_package: | ||
63 | continue | ||
64 | |||
65 | # Iterate over all files in the TO of the relationship | ||
66 | for o in rel.to: | ||
67 | if not isinstance(o, spdx_3_0_1.software_File): | ||
68 | continue | ||
69 | |||
70 | # Find the SHA 256 hash of the file (if any) | ||
71 | for h in o.verifiedUsing: | ||
72 | if ( | ||
73 | isinstance(h, spdx_3_0_1.Hash) | ||
74 | and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256 | ||
75 | ): | ||
76 | files.append((o.name, h.hashValue)) | ||
77 | break | ||
78 | else: | ||
79 | files.append((o.name, "")) | ||
80 | |||
81 | # Print files | ||
82 | files.sort(key=lambda x: x[0]) | ||
83 | for name, hash_val in files: | ||
84 | print(f"{name} - {hash_val}") | ||
85 | |||
86 | return 0 | ||
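The README above calls this script a template; as a minimal sketch of one adaptation, the same relationship walk can be repurposed to flag files with no SHA-256 at all. `files_without_sha256` is a hypothetical helper, not part of the commit, and uses only the `spdx_python_model` calls already shown:

```python
def files_without_sha256(objset, root_package):
    """List packaged files carrying no SHA-256 hash (hypothetical sketch)."""
    missing = []
    for rel in objset.foreach_type(spdx_3_0_1.Relationship):
        # Same filters as main(): "contains" relationships from the rootfs package
        if rel.relationshipType != spdx_3_0_1.RelationshipType.contains:
            continue
        if rel.from_ is not root_package:
            continue
        for o in rel.to:
            if not isinstance(o, spdx_3_0_1.software_File):
                continue
            # verifiedUsing holds zero or more Hash objects
            if not any(isinstance(h, spdx_3_0_1.Hash)
                       and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256
                       for h in o.verifiedUsing):
                missing.append(o.name)
    return sorted(missing)
```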
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py new file mode 100644 index 0000000000..901e5110b2 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py | |||
@@ -0,0 +1 @@ | |||
VERSION = "0.0.1" | |||
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py index 62c509f51c..d8d7b214e5 100755 --- a/scripts/contrib/patchreview.py +++ b/scripts/contrib/patchreview.py | |||
@@ -1,14 +1,29 @@ | |||
1 | #! /usr/bin/env python3 | 1 | #! /usr/bin/env python3 |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | 7 | ||
8 | import argparse | ||
9 | import collections | ||
10 | import json | ||
11 | import os | ||
12 | import os.path | ||
13 | import pathlib | ||
14 | import re | ||
15 | import subprocess | ||
16 | |||
17 | import sys | ||
18 | sys.path.append(os.path.join(sys.path[0], '../../meta/lib')) | ||
19 | import oe.qa | ||
20 | |||
6 | # TODO | 21 | # TODO |
7 | # - option to just list all broken files | 22 | # - option to just list all broken files |
8 | # - test suite | 23 | # - test suite |
9 | # - validate signed-off-by | 24 | # - validate signed-off-by |
10 | 25 | ||
11 | status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied") | 26 | status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream") |
12 | 27 | ||
13 | class Result: | 28 | class Result: |
14 | # Whether the patch has an Upstream-Status or not | 29 | # Whether the patch has an Upstream-Status or not |
@@ -33,20 +48,18 @@ def blame_patch(patch): | |||
33 | From a patch filename, return a list of "commit summary (author name <author | 48 | From a patch filename, return a list of "commit summary (author name <author |
34 | email>)" strings representing the history. | 49 | email>)" strings representing the history. |
35 | """ | 50 | """ |
36 | import subprocess | ||
37 | return subprocess.check_output(("git", "log", | 51 | return subprocess.check_output(("git", "log", |
38 | "--follow", "--find-renames", "--diff-filter=A", | 52 | "--follow", "--find-renames", "--diff-filter=A", |
39 | "--format=%s (%aN <%aE>)", | 53 | "--format=%s (%aN <%aE>)", |
40 | "--", patch)).decode("utf-8").splitlines() | 54 | "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines() |
41 | 55 | ||
42 | def patchreview(path, patches): | 56 | def patchreview(patches): |
43 | import re, os.path | ||
44 | 57 | ||
45 | # General pattern: start of line, optional whitespace, tag with optional | 58 | # General pattern: start of line, optional whitespace, tag with optional |
46 | # hyphen or spaces, maybe a colon, some whitespace, then the value, all case | 59 | # hyphen or spaces, maybe a colon, some whitespace, then the value, all case |
47 | # insensitive. | 60 | # insensitive. |
48 | sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) | 61 | sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) |
49 | status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE) | 62 | status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE) |
50 | cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) | 63 | cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) |
51 | cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) | 64 | cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) |
52 | 65 | ||
@@ -54,11 +67,10 @@ def patchreview(path, patches): | |||
54 | 67 | ||
55 | for patch in patches: | 68 | for patch in patches: |
56 | 69 | ||
57 | fullpath = os.path.join(path, patch) | ||
58 | result = Result() | 70 | result = Result() |
59 | results[fullpath] = result | 71 | results[patch] = result |
60 | 72 | ||
61 | content = open(fullpath, encoding='ascii', errors='ignore').read() | 73 | content = open(patch, encoding='ascii', errors='ignore').read() |
62 | 74 | ||
63 | # Find the Signed-off-by tag | 75 | # Find the Signed-off-by tag |
64 | match = sob_re.search(content) | 76 | match = sob_re.search(content) |
@@ -70,12 +82,11 @@ def patchreview(path, patches): | |||
70 | else: | 82 | else: |
71 | result.missing_sob = True | 83 | result.missing_sob = True |
72 | 84 | ||
73 | |||
74 | # Find the Upstream-Status tag | 85 | # Find the Upstream-Status tag |
75 | match = status_re.search(content) | 86 | match = status_re.search(content) |
76 | if match: | 87 | if match: |
77 | value = match.group(1) | 88 | value = oe.qa.check_upstream_status(patch) |
78 | if value != "Upstream-Status:": | 89 | if value: |
79 | result.malformed_upstream_status = value | 90 | result.malformed_upstream_status = value |
80 | 91 | ||
81 | value = match.group(2).lower() | 92 | value = match.group(2).lower() |
@@ -191,29 +202,56 @@ Patches in Pending state: %s""" % (total_patches, | |||
191 | def histogram(results): | 202 | def histogram(results): |
192 | from toolz import recipes, dicttoolz | 203 | from toolz import recipes, dicttoolz |
193 | import math | 204 | import math |
205 | |||
194 | counts = recipes.countby(lambda r: r.upstream_status, results.values()) | 206 | counts = recipes.countby(lambda r: r.upstream_status, results.values()) |
195 | bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) | 207 | bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) |
196 | for k in bars: | 208 | for k in bars: |
197 | print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) | 209 | print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) |
198 | 210 | ||
211 | def find_layers(candidate): | ||
212 | # candidate can either be the path to a layer directly (eg meta-intel), or a | ||
213 | # repository that contains other layers (meta-arm). We can determine what by | ||
214 | # looking for a conf/layer.conf file. If that file exists then it's a layer, | ||
215 | # otherwise it's a repository of layers and we can assume they're called | ||
216 | # meta-*. | ||
217 | |||
218 | if (candidate / "conf" / "layer.conf").exists(): | ||
219 | return [candidate.absolute()] | ||
220 | else: | ||
221 | return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))] | ||
222 | |||
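As an illustration of the heuristic in the comment above, using its own example repositories (hypothetical local checkouts):

```python
# A layer itself (has conf/layer.conf at its top level):
find_layers(pathlib.Path("meta-intel"))  # -> [.../meta-intel]
# A repository of layers (no top-level conf/layer.conf):
find_layers(pathlib.Path("meta-arm"))    # -> [.../meta-arm/meta-arm, .../meta-arm/meta-arm-bsp, ...]
```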
223 | # TODO these don't actually handle dynamic-layers/ | ||
224 | |||
225 | def gather_patches(layers): | ||
226 | patches = [] | ||
227 | for directory in layers: | ||
228 | filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split() | ||
229 | patches += [os.path.join(directory, f) for f in filenames] | ||
230 | return patches | ||
231 | |||
232 | def count_recipes(layers): | ||
233 | count = 0 | ||
234 | for directory in layers: | ||
235 | output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True) | ||
236 | count += len(output.splitlines()) | ||
237 | return count | ||
199 | 238 | ||
200 | if __name__ == "__main__": | 239 | if __name__ == "__main__": |
201 | import argparse, subprocess, os | ||
202 | |||
203 | args = argparse.ArgumentParser(description="Patch Review Tool") | 240 | args = argparse.ArgumentParser(description="Patch Review Tool") |
204 | args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") | 241 | args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") |
205 | args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") | 242 | args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") |
206 | args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") | 243 | args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") |
207 | args.add_argument("-j", "--json", help="update JSON") | 244 | args.add_argument("-j", "--json", help="update JSON") |
208 | args.add_argument("directory", help="directory to scan") | 245 | args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)") |
209 | args = args.parse_args() | 246 | args = args.parse_args() |
210 | 247 | ||
211 | patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split() | 248 | layers = find_layers(args.directory) |
212 | results = patchreview(args.directory, patches) | 249 | print(f"Found layers {' '.join((d.name for d in layers))}") |
250 | patches = gather_patches(layers) | ||
251 | results = patchreview(patches) | ||
213 | analyse(results, want_blame=args.blame, verbose=args.verbose) | 252 | analyse(results, want_blame=args.blame, verbose=args.verbose) |
214 | 253 | ||
215 | if args.json: | 254 | if args.json: |
216 | import json, os.path, collections | ||
217 | if os.path.isfile(args.json): | 255 | if os.path.isfile(args.json): |
218 | data = json.load(open(args.json)) | 256 | data = json.load(open(args.json)) |
219 | else: | 257 | else: |
@@ -221,7 +259,11 @@ if __name__ == "__main__": | |||
221 | 259 | ||
222 | row = collections.Counter() | 260 | row = collections.Counter() |
223 | row["total"] = len(results) | 261 | row["total"] = len(results) |
224 | row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip() | 262 | row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip() |
263 | row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip() | ||
264 | row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip() | ||
265 | row['recipe_count'] = count_recipes(layers) | ||
266 | |||
225 | for r in results.values(): | 267 | for r in results.values(): |
226 | if r.upstream_status in status_values: | 268 | if r.upstream_status in status_values: |
227 | row[r.upstream_status] += 1 | 269 | row[r.upstream_status] += 1 |
@@ -231,7 +273,7 @@ if __name__ == "__main__": | |||
231 | row['malformed-sob'] += 1 | 273 | row['malformed-sob'] += 1 |
232 | 274 | ||
233 | data.append(row) | 275 | data.append(row) |
234 | json.dump(data, open(args.json, "w")) | 276 | json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t") |
235 | 277 | ||
236 | if args.histogram: | 278 | if args.histogram: |
237 | print() | 279 | print() |
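For orientation, a row appended to the `--json` file would take roughly the following shape; the keys mirror those set in the code above, while every value here is invented:

```python
# Hypothetical example of one JSON row; "pending"/"backport" stand in for
# whichever Upstream-Status values actually occur in the scanned patches.
row = {
    "total": 1234,                  # patches scanned
    "date": "1700000000",           # HEAD commit date, epoch seconds
    "commit": "0123abc...",         # rev-parse HEAD (elided)
    "commit_count": "87654",        # rev-list --count HEAD
    "recipe_count": 910,            # *.bb files across the layers
    "pending": 400,
    "backport": 250,
    "malformed-upstream-status": 2,
    "malformed-sob": 1,
}
```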
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh index 23f238adf6..4012ac7ba7 100755 --- a/scripts/contrib/test_build_time.sh +++ b/scripts/contrib/test_build_time.sh | |||
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then | |||
97 | exit 251 | 97 | exit 251 |
98 | fi | 98 | fi |
99 | 99 | ||
100 | if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then | 100 | if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then |
101 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" | 101 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" |
102 | fi | 102 | fi |
103 | 103 | ||
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh index 478e8b0d03..a2879d2336 100755 --- a/scripts/contrib/test_build_time_worker.sh +++ b/scripts/contrib/test_build_time_worker.sh | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/bin/bash | 1 | #!/bin/bash |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | # This is an example script to be used in conjunction with test_build_time.sh | 7 | # This is an example script to be used in conjunction with test_build_time.sh |
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py index 7bffa78e23..a90b5010bc 100755 --- a/scripts/contrib/verify-homepage.py +++ b/scripts/contrib/verify-homepage.py | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/usr/bin/env python3 | 1 | #!/usr/bin/env python3 |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | # This script can be used to verify HOMEPAGE values for all recipes in | 7 | # This script can be used to verify HOMEPAGE values for all recipes in |