Diffstat (limited to 'scripts/contrib')
25 files changed, 1733 insertions, 203 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh index e7bd129e9e..6672189c95 100755 --- a/scripts/contrib/bb-perf/bb-matrix-plot.sh +++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh | |||
| @@ -16,8 +16,8 @@ | |||
| 16 | 16 | ||
| 17 | # Setup the defaults | 17 | # Setup the defaults |
| 18 | DATFILE="bb-matrix.dat" | 18 | DATFILE="bb-matrix.dat" |
| 19 | XLABEL="BB_NUMBER_THREADS" | 19 | XLABEL="BB\\\\_NUMBER\\\\_THREADS" |
| 20 | YLABEL="PARALLEL_MAKE" | 20 | YLABEL="PARALLEL\\\\_MAKE" |
| 21 | FIELD=3 | 21 | FIELD=3 |
| 22 | DEF_TITLE="Elapsed Time (seconds)" | 22 | DEF_TITLE="Elapsed Time (seconds)" |
| 23 | PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100" | 23 | PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100" |
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py index 090133600b..a9cdf082ab 100755 --- a/scripts/contrib/bbvars.py +++ b/scripts/contrib/bbvars.py | |||
| @@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars): | |||
| 36 | def collect_documented_vars(docfiles): | 36 | def collect_documented_vars(docfiles): |
| 37 | ''' Walk the docfiles and collect the documented variables ''' | 37 | ''' Walk the docfiles and collect the documented variables ''' |
| 38 | documented_vars = [] | 38 | documented_vars = [] |
| 39 | prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-") | 39 | prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-") |
| 40 | var_prog = re.compile('<glossentry id=\'var-(.*)\'>') | 40 | var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>') |
| 41 | for d in docfiles: | 41 | for d in docfiles: |
| 42 | with open(d) as f: | 42 | with open(d) as f: |
| 43 | documented_vars += var_prog.findall(f.read()) | 43 | documented_vars += var_prog.findall(f.read()) |
| @@ -45,7 +45,7 @@ def collect_documented_vars(docfiles): | |||
| 45 | return documented_vars | 45 | return documented_vars |
| 46 | 46 | ||
| 47 | def bbvar_doctag(var, docconf): | 47 | def bbvar_doctag(var, docconf): |
| 48 | prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var)) | 48 | prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var)) |
| 49 | if docconf == "": | 49 | if docconf == "": |
| 50 | return "?" | 50 | return "?" |
| 51 | 51 | ||
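The point of this bbvars.py change is that both patterns rely on regex escapes such as \[ and \', which are invalid escape sequences in an ordinary Python string literal (a warning in newer Python 3 releases); the r prefix passes the backslashes through to the regex engine unchanged. A minimal sketch of the fixed doctag pattern in action, with an invented variable name and doc string:

    import re

    # Raw string: the backslashes reach re.compile() exactly as written.
    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % "IMAGE_INSTALL")
    m = prog.match('IMAGE_INSTALL[doc] = "List of packages to install"')
    print(m.group(1))  # -> List of packages to install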
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh index fa71d4a2e9..0a85e6e708 100755 --- a/scripts/contrib/build-perf-test-wrapper.sh +++ b/scripts/contrib/build-perf-test-wrapper.sh | |||
| @@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then | |||
| 87 | exit 1 | 87 | exit 1 |
| 88 | fi | 88 | fi |
| 89 | 89 | ||
| 90 | if [ -n "$email_to" ]; then | ||
| 91 | if ! [ -x "$(command -v phantomjs)" ]; then | ||
| 92 | echo "ERROR: Sending email needs phantomjs." | ||
| 93 | exit 1 | ||
| 94 | fi | ||
| 95 | if ! [ -x "$(command -v optipng)" ]; then | ||
| 96 | echo "ERROR: Sending email needs optipng." | ||
| 97 | exit 1 | ||
| 98 | fi | ||
| 99 | fi | ||
| 100 | |||
| 101 | # Open a file descriptor for flock and acquire lock | 90 | # Open a file descriptor for flock and acquire lock |
| 102 | LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" | 91 | LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" |
| 103 | if ! exec 3> "$LOCK_FILE"; then | 92 | if ! exec 3> "$LOCK_FILE"; then |
| 104 | echo "ERROR: Unable to open lock file" | 93 | echo "ERROR: Unable to open loemack file" |
| 105 | exit 1 | 94 | exit 1 |
| 106 | fi | 95 | fi |
| 107 | if ! flock -n 3; then | 96 | if ! flock -n 3; then |
| @@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then | |||
| 226 | if [ -n "$email_to" ]; then | 215 | if [ -n "$email_to" ]; then |
| 227 | echo "Emailing test report" | 216 | echo "Emailing test report" |
| 228 | os_name=`get_os_release_var PRETTY_NAME` | 217 | os_name=`get_os_release_var PRETTY_NAME` |
| 229 | "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" | 218 | "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" |
| 230 | fi | 219 | fi |
| 231 | 220 | ||
| 232 | # Upload report files, unless we're on detached head | 221 | # Upload report files, unless we're on detached head |
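For context, the wrapper serializes concurrent runs by opening file descriptor 3 on the lock file and taking a non-blocking flock on it. A rough Python equivalent of that shell idiom, for illustration only (the error message wording is invented):

    import fcntl
    import sys

    # Mirrors 'exec 3> "$LOCK_FILE"' followed by 'flock -n 3'.
    lock_file = open("/tmp/oe-build-perf-test-wrapper.lock", "w")
    try:
        fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except BlockingIOError:
        print("ERROR: another test run already holds the lock")
        sys.exit(1)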
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py new file mode 100755 index 0000000000..c69acb4095 --- /dev/null +++ b/scripts/contrib/convert-overrides.py | |||
| @@ -0,0 +1,155 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Conversion script to add new override syntax to existing bitbake metadata | ||
| 4 | # | ||
| 5 | # Copyright (C) 2021 Richard Purdie | ||
| 6 | # | ||
| 7 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 8 | # | ||
| 9 | |||
| 10 | # | ||
| 11 | # To use this script on a new layer you need to list the overrides the | ||
| 12 | # layer is known to use in the list below. | ||
| 13 | # | ||
| 14 | # Known constraint: Matching is 'loose' and in particular will find variable | ||
| 15 | # and function names with "_append" and "_remove" in them. Those need to be | ||
| 16 | # filtered out manually or in the skip list below. | ||
| 17 | # | ||
| 18 | |||
| 19 | import re | ||
| 20 | import os | ||
| 21 | import sys | ||
| 22 | import tempfile | ||
| 23 | import shutil | ||
| 24 | import mimetypes | ||
| 25 | import argparse | ||
| 26 | |||
| 27 | parser = argparse.ArgumentParser(description="Convert override syntax") | ||
| 28 | parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros") | ||
| 29 | parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override") | ||
| 30 | parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')") | ||
| 31 | parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables") | ||
| 32 | parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables") | ||
| 33 | parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides") | ||
| 34 | parser.add_argument("path", nargs="+", help="Paths to convert") | ||
| 35 | |||
| 36 | args = parser.parse_args() | ||
| 37 | |||
| 38 | # List of strings to treat as overrides | ||
| 39 | vars = args.override | ||
| 40 | vars += ["append", "prepend", "remove"] | ||
| 41 | vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"] | ||
| 42 | vars += ["genericx86", "edgerouter", "beaglebone-yocto"] | ||
| 43 | vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"] | ||
| 44 | vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"] | ||
| 45 | vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"] | ||
| 46 | vars += ["tune-", "pn-", "forcevariable"] | ||
| 47 | vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"] | ||
| 48 | vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"] | ||
| 49 | vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"] | ||
| 50 | vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"] | ||
| 51 | vars += ["linux-gnueabi", "eabi"] | ||
| 52 | vars += ["virtclass-multilib", "virtclass-mcextend"] | ||
| 53 | |||
| 54 | # List of strings to treat as overrides but only with whitespace following or another override (more restricted matching). | ||
| 55 | # Handles issues with arc matching arch. | ||
| 56 | shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override | ||
| 57 | |||
| 58 | # Variables which take packagenames as an override | ||
| 59 | packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY", | ||
| 60 | "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE", | ||
| 61 | "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst", | ||
| 62 | "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA", | ||
| 63 | "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars | ||
| 64 | |||
| 65 | # Expressions to skip if encountered, these are not overrides | ||
| 66 | skips = args.skip | ||
| 67 | skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"] | ||
| 68 | skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"] | ||
| 69 | skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"] | ||
| 70 | skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"] | ||
| 71 | skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"] | ||
| 72 | skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"] | ||
| 73 | skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"] | ||
| 74 | skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"] | ||
| 75 | skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"] | ||
| 76 | |||
| 77 | imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars | ||
| 78 | packagevars += imagevars | ||
| 79 | |||
| 80 | skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext | ||
| 81 | |||
| 82 | vars_re = {} | ||
| 83 | for exp in vars: | ||
| 84 | vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp) | ||
| 85 | |||
| 86 | shortvars_re = {} | ||
| 87 | for exp in shortvars: | ||
| 88 | shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3") | ||
| 89 | |||
| 90 | package_re = {} | ||
| 91 | for exp in packagevars: | ||
| 92 | package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2") | ||
| 93 | |||
| 94 | # Other substitutions to make | ||
| 95 | subs = { | ||
| 96 | r'r = re.compile(r"([^:]+):\s*(.*)")' : r'r = re.compile(r"(^.+?):\s+(.*)")', | ||
| 97 | "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))", | ||
| 98 | "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))", | ||
| 99 | "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))", | ||
| 100 | 'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)', | ||
| 101 | } | ||
| 102 | |||
| 103 | def processfile(fn): | ||
| 104 | print("processing file '%s'" % fn) | ||
| 105 | try: | ||
| 106 | fh, abs_path = tempfile.mkstemp() | ||
| 107 | with os.fdopen(fh, 'w') as new_file: | ||
| 108 | with open(fn, "r") as old_file: | ||
| 109 | for line in old_file: | ||
| 110 | skip = False | ||
| 111 | for s in skips: | ||
| 112 | if s in line: | ||
| 113 | skip = True | ||
| 114 | if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line: | ||
| 115 | skip = False | ||
| 116 | for sub in subs: | ||
| 117 | if sub in line: | ||
| 118 | line = line.replace(sub, subs[sub]) | ||
| 119 | skip = True | ||
| 120 | if not skip: | ||
| 121 | for pvar in packagevars: | ||
| 122 | line = package_re[pvar][0].sub(package_re[pvar][1], line) | ||
| 123 | for var in vars: | ||
| 124 | line = vars_re[var][0].sub(vars_re[var][1], line) | ||
| 125 | for shortvar in shortvars: | ||
| 126 | line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line) | ||
| 127 | if "pkg_postinst:ontarget" in line: | ||
| 128 | line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget") | ||
| 129 | new_file.write(line) | ||
| 130 | shutil.copymode(fn, abs_path) | ||
| 131 | os.remove(fn) | ||
| 132 | shutil.move(abs_path, fn) | ||
| 133 | except UnicodeDecodeError: | ||
| 134 | pass | ||
| 135 | |||
| 136 | ourname = os.path.basename(sys.argv[0]) | ||
| 137 | ourversion = "0.9.3" | ||
| 138 | |||
| 139 | for p in args.path: | ||
| 140 | if os.path.isfile(p): | ||
| 141 | processfile(p) | ||
| 142 | else: | ||
| 143 | print("processing directory '%s'" % p) | ||
| 144 | for root, dirs, files in os.walk(p): | ||
| 145 | for name in files: | ||
| 146 | if name == ourname: | ||
| 147 | continue | ||
| 148 | fn = os.path.join(root, name) | ||
| 149 | if os.path.islink(fn): | ||
| 150 | continue | ||
| 151 | if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext): | ||
| 152 | continue | ||
| 153 | processfile(fn) | ||
| 154 | |||
| 155 | print("All files processed with version %s" % ourversion) | ||
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py new file mode 100755 index 0000000000..13cf12a33f --- /dev/null +++ b/scripts/contrib/convert-spdx-licenses.py | |||
| @@ -0,0 +1,145 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Conversion script to change LICENSE entries to SPDX identifiers | ||
| 4 | # | ||
| 5 | # Copyright (C) 2021-2022 Richard Purdie | ||
| 6 | # | ||
| 7 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 8 | # | ||
| 9 | |||
| 10 | import re | ||
| 11 | import os | ||
| 12 | import sys | ||
| 13 | import tempfile | ||
| 14 | import shutil | ||
| 15 | import mimetypes | ||
| 16 | |||
| 17 | if len(sys.argv) < 2: | ||
| 18 | print("Please specify a directory to run the conversion script against.") | ||
| 19 | sys.exit(1) | ||
| 20 | |||
| 21 | license_map = { | ||
| 22 | "AGPL-3" : "AGPL-3.0-only", | ||
| 23 | "AGPL-3+" : "AGPL-3.0-or-later", | ||
| 24 | "AGPLv3" : "AGPL-3.0-only", | ||
| 25 | "AGPLv3+" : "AGPL-3.0-or-later", | ||
| 26 | "AGPLv3.0" : "AGPL-3.0-only", | ||
| 27 | "AGPLv3.0+" : "AGPL-3.0-or-later", | ||
| 28 | "AGPL-3.0" : "AGPL-3.0-only", | ||
| 29 | "AGPL-3.0+" : "AGPL-3.0-or-later", | ||
| 30 | "BSD-0-Clause" : "0BSD", | ||
| 31 | "GPL-1" : "GPL-1.0-only", | ||
| 32 | "GPL-1+" : "GPL-1.0-or-later", | ||
| 33 | "GPLv1" : "GPL-1.0-only", | ||
| 34 | "GPLv1+" : "GPL-1.0-or-later", | ||
| 35 | "GPLv1.0" : "GPL-1.0-only", | ||
| 36 | "GPLv1.0+" : "GPL-1.0-or-later", | ||
| 37 | "GPL-1.0" : "GPL-1.0-only", | ||
| 38 | "GPL-1.0+" : "GPL-1.0-or-later", | ||
| 39 | "GPL-2" : "GPL-2.0-only", | ||
| 40 | "GPL-2+" : "GPL-2.0-or-later", | ||
| 41 | "GPLv2" : "GPL-2.0-only", | ||
| 42 | "GPLv2+" : "GPL-2.0-or-later", | ||
| 43 | "GPLv2.0" : "GPL-2.0-only", | ||
| 44 | "GPLv2.0+" : "GPL-2.0-or-later", | ||
| 45 | "GPL-2.0" : "GPL-2.0-only", | ||
| 46 | "GPL-2.0+" : "GPL-2.0-or-later", | ||
| 47 | "GPL-3" : "GPL-3.0-only", | ||
| 48 | "GPL-3+" : "GPL-3.0-or-later", | ||
| 49 | "GPLv3" : "GPL-3.0-only", | ||
| 50 | "GPLv3+" : "GPL-3.0-or-later", | ||
| 51 | "GPLv3.0" : "GPL-3.0-only", | ||
| 52 | "GPLv3.0+" : "GPL-3.0-or-later", | ||
| 53 | "GPL-3.0" : "GPL-3.0-only", | ||
| 54 | "GPL-3.0+" : "GPL-3.0-or-later", | ||
| 55 | "LGPLv2" : "LGPL-2.0-only", | ||
| 56 | "LGPLv2+" : "LGPL-2.0-or-later", | ||
| 57 | "LGPLv2.0" : "LGPL-2.0-only", | ||
| 58 | "LGPLv2.0+" : "LGPL-2.0-or-later", | ||
| 59 | "LGPL-2.0" : "LGPL-2.0-only", | ||
| 60 | "LGPL-2.0+" : "LGPL-2.0-or-later", | ||
| 61 | "LGPL2.1" : "LGPL-2.1-only", | ||
| 62 | "LGPL2.1+" : "LGPL-2.1-or-later", | ||
| 63 | "LGPLv2.1" : "LGPL-2.1-only", | ||
| 64 | "LGPLv2.1+" : "LGPL-2.1-or-later", | ||
| 65 | "LGPL-2.1" : "LGPL-2.1-only", | ||
| 66 | "LGPL-2.1+" : "LGPL-2.1-or-later", | ||
| 67 | "LGPLv3" : "LGPL-3.0-only", | ||
| 68 | "LGPLv3+" : "LGPL-3.0-or-later", | ||
| 69 | "LGPL-3.0" : "LGPL-3.0-only", | ||
| 70 | "LGPL-3.0+" : "LGPL-3.0-or-later", | ||
| 71 | "MPL-1" : "MPL-1.0", | ||
| 72 | "MPLv1" : "MPL-1.0", | ||
| 73 | "MPLv1.1" : "MPL-1.1", | ||
| 74 | "MPLv2" : "MPL-2.0", | ||
| 75 | "MIT-X" : "MIT", | ||
| 76 | "MIT-style" : "MIT", | ||
| 77 | "openssl" : "OpenSSL", | ||
| 78 | "PSF" : "PSF-2.0", | ||
| 79 | "PSFv2" : "PSF-2.0", | ||
| 80 | "Python-2" : "Python-2.0", | ||
| 81 | "Apachev2" : "Apache-2.0", | ||
| 82 | "Apache-2" : "Apache-2.0", | ||
| 83 | "Artisticv1" : "Artistic-1.0", | ||
| 84 | "Artistic-1" : "Artistic-1.0", | ||
| 85 | "AFL-2" : "AFL-2.0", | ||
| 86 | "AFL-1" : "AFL-1.2", | ||
| 87 | "AFLv2" : "AFL-2.0", | ||
| 88 | "AFLv1" : "AFL-1.2", | ||
| 89 | "CDDLv1" : "CDDL-1.0", | ||
| 90 | "CDDL-1" : "CDDL-1.0", | ||
| 91 | "EPLv1.0" : "EPL-1.0", | ||
| 92 | "FreeType" : "FTL", | ||
| 93 | "Nauman" : "Naumen", | ||
| 94 | "tcl" : "TCL", | ||
| 95 | "vim" : "Vim", | ||
| 96 | "SGIv1" : "SGI-OpenGL", | ||
| 97 | } | ||
| 98 | |||
| 99 | def processfile(fn): | ||
| 100 | print("processing file '%s'" % fn) | ||
| 101 | try: | ||
| 102 | fh, abs_path = tempfile.mkstemp() | ||
| 103 | modified = False | ||
| 104 | with os.fdopen(fh, 'w') as new_file: | ||
| 105 | with open(fn, "r") as old_file: | ||
| 106 | for line in old_file: | ||
| 107 | if not line.startswith("LICENSE"): | ||
| 108 | new_file.write(line) | ||
| 109 | continue | ||
| 110 | orig = line | ||
| 111 | for license in sorted(license_map, key=len, reverse=True): | ||
| 112 | for ending in ['"', "'", " ", ")"]: | ||
| 113 | line = line.replace(license + ending, license_map[license] + ending) | ||
| 114 | if orig != line: | ||
| 115 | modified = True | ||
| 116 | new_file.write(line) | ||
| 117 | new_file.close() | ||
| 118 | if modified: | ||
| 119 | shutil.copymode(fn, abs_path) | ||
| 120 | os.remove(fn) | ||
| 121 | shutil.move(abs_path, fn) | ||
| 122 | except UnicodeDecodeError: | ||
| 123 | pass | ||
| 124 | |||
| 125 | ourname = os.path.basename(sys.argv[0]) | ||
| 126 | ourversion = "0.01" | ||
| 127 | |||
| 128 | if os.path.isfile(sys.argv[1]): | ||
| 129 | processfile(sys.argv[1]) | ||
| 130 | sys.exit(0) | ||
| 131 | |||
| 132 | for targetdir in sys.argv[1:]: | ||
| 133 | print("processing directory '%s'" % targetdir) | ||
| 134 | for root, dirs, files in os.walk(targetdir): | ||
| 135 | for name in files: | ||
| 136 | if name == ourname: | ||
| 137 | continue | ||
| 138 | fn = os.path.join(root, name) | ||
| 139 | if os.path.islink(fn): | ||
| 140 | continue | ||
| 141 | if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"): | ||
| 142 | continue | ||
| 143 | processfile(fn) | ||
| 144 | |||
| 145 | print("All files processed with version %s" % ourversion) | ||
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py new file mode 100755 index 0000000000..587392334f --- /dev/null +++ b/scripts/contrib/convert-srcuri.py | |||
| @@ -0,0 +1,77 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Conversion script to update SRC_URI to add branch to git urls | ||
| 4 | # | ||
| 5 | # Copyright (C) 2021 Richard Purdie | ||
| 6 | # | ||
| 7 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 8 | # | ||
| 9 | |||
| 10 | import re | ||
| 11 | import os | ||
| 12 | import sys | ||
| 13 | import tempfile | ||
| 14 | import shutil | ||
| 15 | import mimetypes | ||
| 16 | |||
| 17 | if len(sys.argv) < 2: | ||
| 18 | print("Please specify a directory to run the conversion script against.") | ||
| 19 | sys.exit(1) | ||
| 20 | |||
| 21 | def processfile(fn): | ||
| 22 | def matchline(line): | ||
| 23 | if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line: | ||
| 24 | return False | ||
| 25 | return True | ||
| 26 | print("processing file '%s'" % fn) | ||
| 27 | try: | ||
| 28 | if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn: | ||
| 29 | return | ||
| 30 | fh, abs_path = tempfile.mkstemp() | ||
| 31 | modified = False | ||
| 32 | with os.fdopen(fh, 'w') as new_file: | ||
| 33 | with open(fn, "r") as old_file: | ||
| 34 | for line in old_file: | ||
| 35 | if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line): | ||
| 36 | if line.endswith('"\n'): | ||
| 37 | line = line.replace('"\n', ';branch=master"\n') | ||
| 38 | elif re.search(r'\s*\\$', line): | ||
| 39 | line = re.sub(r'\s*\\$', r';branch=master \\', line) | ||
| 40 | modified = True | ||
| 41 | if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line): | ||
| 42 | if "protocol=git" in line: | ||
| 43 | line = line.replace('protocol=git', 'protocol=https') | ||
| 44 | elif line.endswith('"\n'): | ||
| 45 | line = line.replace('"\n', ';protocol=https"\n') | ||
| 46 | elif re.search(r'\s*\\$', line): | ||
| 47 | line = re.sub(r'\s*\\$', r';protocol=https \\', line) | ||
| 48 | modified = True | ||
| 49 | new_file.write(line) | ||
| 50 | if modified: | ||
| 51 | shutil.copymode(fn, abs_path) | ||
| 52 | os.remove(fn) | ||
| 53 | shutil.move(abs_path, fn) | ||
| 54 | except UnicodeDecodeError: | ||
| 55 | pass | ||
| 56 | |||
| 57 | ourname = os.path.basename(sys.argv[0]) | ||
| 58 | ourversion = "0.1" | ||
| 59 | |||
| 60 | if os.path.isfile(sys.argv[1]): | ||
| 61 | processfile(sys.argv[1]) | ||
| 62 | sys.exit(0) | ||
| 63 | |||
| 64 | for targetdir in sys.argv[1:]: | ||
| 65 | print("processing directory '%s'" % targetdir) | ||
| 66 | for root, dirs, files in os.walk(targetdir): | ||
| 67 | for name in files: | ||
| 68 | if name == ourname: | ||
| 69 | continue | ||
| 70 | fn = os.path.join(root, name) | ||
| 71 | if os.path.islink(fn): | ||
| 72 | continue | ||
| 73 | if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"): | ||
| 74 | continue | ||
| 75 | processfile(fn) | ||
| 76 | |||
| 77 | print("All files processed with version %s" % ourversion) | ||
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py new file mode 100755 index 0000000000..eded90ca61 --- /dev/null +++ b/scripts/contrib/convert-variable-renames.py | |||
| @@ -0,0 +1,116 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Conversion script to rename variables to versions with improved terminology. | ||
| 4 | # Also highlights potentially problematic language and removed variables. | ||
| 5 | # | ||
| 6 | # Copyright (C) 2021 Richard Purdie | ||
| 7 | # Copyright (C) 2022 Wind River Systems, Inc. | ||
| 8 | # | ||
| 9 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 10 | # | ||
| 11 | |||
| 12 | import re | ||
| 13 | import os | ||
| 14 | import sys | ||
| 15 | import tempfile | ||
| 16 | import shutil | ||
| 17 | import mimetypes | ||
| 18 | |||
| 19 | if len(sys.argv) < 2: | ||
| 20 | print("Please specify a directory to run the conversion script against.") | ||
| 21 | sys.exit(1) | ||
| 22 | |||
| 23 | renames = { | ||
| 24 | "BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH", | ||
| 25 | "BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS", | ||
| 26 | "BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS", | ||
| 27 | "BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS", | ||
| 28 | "BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS", | ||
| 29 | "BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS", | ||
| 30 | "CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE", | ||
| 31 | "CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE", | ||
| 32 | "MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED", | ||
| 33 | "PNBLACKLIST" : "SKIP_RECIPE", | ||
| 34 | "SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE", | ||
| 35 | "SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW", | ||
| 36 | "SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE", | ||
| 37 | "SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES", | ||
| 38 | "SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE", | ||
| 39 | "UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE", | ||
| 40 | "ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE", | ||
| 41 | "ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE", | ||
| 42 | "ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE", | ||
| 43 | "ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE", | ||
| 44 | "ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE", | ||
| 45 | "LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED", | ||
| 46 | } | ||
| 47 | |||
| 48 | removed_list = [ | ||
| 49 | "BB_STAMP_WHITELIST", | ||
| 50 | "BB_STAMP_POLICY", | ||
| 51 | "INHERIT_BLACKLIST", | ||
| 52 | "TUNEABI_WHITELIST", | ||
| 53 | ] | ||
| 54 | |||
| 55 | context_check_list = [ | ||
| 56 | "blacklist", | ||
| 57 | "whitelist", | ||
| 58 | "abort", | ||
| 59 | ] | ||
| 60 | |||
| 61 | def processfile(fn): | ||
| 62 | |||
| 63 | print("processing file '%s'" % fn) | ||
| 64 | try: | ||
| 65 | fh, abs_path = tempfile.mkstemp() | ||
| 66 | modified = False | ||
| 67 | with os.fdopen(fh, 'w') as new_file: | ||
| 68 | with open(fn, "r") as old_file: | ||
| 69 | lineno = 0 | ||
| 70 | for line in old_file: | ||
| 71 | lineno += 1 | ||
| 72 | if not line or "BB_RENAMED_VARIABLE" in line: | ||
| 73 | continue | ||
| 74 | # Do the renames | ||
| 75 | for old_name, new_name in renames.items(): | ||
| 76 | if old_name in line: | ||
| 77 | line = line.replace(old_name, new_name) | ||
| 78 | modified = True | ||
| 79 | # Find removed names | ||
| 80 | for removed_name in removed_list: | ||
| 81 | if removed_name in line: | ||
| 82 | print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name)) | ||
| 83 | for check_word in context_check_list: | ||
| 84 | if re.search(check_word, line, re.IGNORECASE): | ||
| 85 | print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word)) | ||
| 86 | new_file.write(line) | ||
| 87 | new_file.close() | ||
| 88 | if modified: | ||
| 89 | print("*** Modified file '%s'" % (fn)) | ||
| 90 | shutil.copymode(fn, abs_path) | ||
| 91 | os.remove(fn) | ||
| 92 | shutil.move(abs_path, fn) | ||
| 93 | except UnicodeDecodeError: | ||
| 94 | pass | ||
| 95 | |||
| 96 | ourname = os.path.basename(sys.argv[0]) | ||
| 97 | ourversion = "0.1" | ||
| 98 | |||
| 99 | if os.path.isfile(sys.argv[1]): | ||
| 100 | processfile(sys.argv[1]) | ||
| 101 | sys.exit(0) | ||
| 102 | |||
| 103 | for targetdir in sys.argv[1:]: | ||
| 104 | print("processing directory '%s'" % targetdir) | ||
| 105 | for root, dirs, files in os.walk(targetdir): | ||
| 106 | for name in files: | ||
| 107 | if name == ourname: | ||
| 108 | continue | ||
| 109 | fn = os.path.join(root, name) | ||
| 110 | if os.path.islink(fn): | ||
| 111 | continue | ||
| 112 | if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"): | ||
| 113 | continue | ||
| 114 | processfile(fn) | ||
| 115 | |||
| 116 | print("All files processed with version %s" % ourversion) | ||
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage index 7f2ad112a6..70eee8ebea 100755 --- a/scripts/contrib/ddimage +++ b/scripts/contrib/ddimage | |||
| @@ -1,5 +1,7 @@ | |||
| 1 | #!/bin/sh | 1 | #!/bin/sh |
| 2 | # | 2 | # |
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 4 | # | 6 | # |
| 5 | 7 | ||
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control index ad6070c369..82c84baa1d 100755 --- a/scripts/contrib/dialog-power-control +++ b/scripts/contrib/dialog-power-control | |||
| @@ -1,5 +1,7 @@ | |||
| 1 | #!/bin/sh | 1 | #!/bin/sh |
| 2 | # | 2 | # |
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 4 | # | 6 | # |
| 5 | # Simple script to show a manual power prompt for when you want to use | 7 | # Simple script to show a manual power prompt for when you want to use |
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh index 1191f57a8e..7197a2fcea 100755 --- a/scripts/contrib/documentation-audit.sh +++ b/scripts/contrib/documentation-audit.sh | |||
| @@ -1,5 +1,7 @@ | |||
| 1 | #!/bin/bash | 1 | #!/bin/bash |
| 2 | # | 2 | # |
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 4 | # | 6 | # |
| 5 | # Perform an audit of which packages provide documentation and which | 7 | # Perform an audit of which packages provide documentation and which |
| @@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then | |||
| 26 | fi | 28 | fi |
| 27 | 29 | ||
| 28 | echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" | 30 | echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" |
| 29 | echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " | 31 | echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or " |
| 30 | echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\"" | 32 | echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\"" |
| 31 | 33 | ||
| 32 | for pkg in `bitbake -s | awk '{ print \$1 }'`; do | 34 | for pkg in `bitbake -s | awk '{ print \$1 }'`; do |
| 33 | if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || | 35 | if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || |
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest new file mode 100755 index 0000000000..4d65a99258 --- /dev/null +++ b/scripts/contrib/image-manifest | |||
| @@ -0,0 +1,523 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | |||
| 3 | # Script to extract information from image manifests | ||
| 4 | # | ||
| 5 | # Copyright (C) 2018 Intel Corporation | ||
| 6 | # Copyright (C) 2021 Wind River Systems, Inc. | ||
| 7 | # | ||
| 8 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 9 | # | ||
| 10 | |||
| 11 | import sys | ||
| 12 | import os | ||
| 13 | import argparse | ||
| 14 | import logging | ||
| 15 | import json | ||
| 16 | import shutil | ||
| 17 | import tempfile | ||
| 18 | import tarfile | ||
| 19 | from collections import OrderedDict | ||
| 20 | |||
| 21 | scripts_path = os.path.dirname(__file__) | ||
| 22 | lib_path = scripts_path + '/../lib' | ||
| 23 | sys.path = sys.path + [lib_path] | ||
| 24 | |||
| 25 | import scriptutils | ||
| 26 | logger = scriptutils.logger_create(os.path.basename(__file__)) | ||
| 27 | |||
| 28 | import argparse_oe | ||
| 29 | import scriptpath | ||
| 30 | bitbakepath = scriptpath.add_bitbake_lib_path() | ||
| 31 | if not bitbakepath: | ||
| 32 | logger.error("Unable to find bitbake by searching parent directory of this script or PATH") | ||
| 33 | sys.exit(1) | ||
| 34 | logger.debug('Using standard bitbake path %s' % bitbakepath) | ||
| 35 | scriptpath.add_oe_lib_path() | ||
| 36 | |||
| 37 | import bb.tinfoil | ||
| 38 | import bb.utils | ||
| 39 | import oe.utils | ||
| 40 | import oe.recipeutils | ||
| 41 | |||
| 42 | def get_pkg_list(manifest): | ||
| 43 | pkglist = [] | ||
| 44 | with open(manifest, 'r') as f: | ||
| 45 | for line in f: | ||
| 46 | linesplit = line.split() | ||
| 47 | if len(linesplit) == 3: | ||
| 48 | # manifest file | ||
| 49 | pkglist.append(linesplit[0]) | ||
| 50 | elif len(linesplit) == 1: | ||
| 51 | # build dependency file | ||
| 52 | pkglist.append(linesplit[0]) | ||
| 53 | return sorted(pkglist) | ||
| 54 | |||
| 55 | def list_packages(args): | ||
| 56 | pkglist = get_pkg_list(args.manifest) | ||
| 57 | for pkg in pkglist: | ||
| 58 | print('%s' % pkg) | ||
| 59 | |||
| 60 | def pkg2recipe(tinfoil, pkg): | ||
| 61 | if "-native" in pkg: | ||
| 62 | logger.info('skipping %s' % pkg) | ||
| 63 | return None | ||
| 64 | |||
| 65 | pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') | ||
| 66 | pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg) | ||
| 67 | logger.debug('pkgdatafile %s' % pkgdatafile) | ||
| 68 | try: | ||
| 69 | f = open(pkgdatafile, 'r') | ||
| 70 | for line in f: | ||
| 71 | if line.startswith('PN:'): | ||
| 72 | recipe = line.split(':', 1)[1].strip() | ||
| 73 | return recipe | ||
| 74 | except Exception: | ||
| 75 | logger.warning('%s is missing' % pkgdatafile) | ||
| 76 | return None | ||
| 77 | |||
| 78 | def get_recipe_list(manifest, tinfoil): | ||
| 79 | pkglist = get_pkg_list(manifest) | ||
| 80 | recipelist = [] | ||
| 81 | for pkg in pkglist: | ||
| 82 | recipe = pkg2recipe(tinfoil,pkg) | ||
| 83 | if recipe: | ||
| 84 | if not recipe in recipelist: | ||
| 85 | recipelist.append(recipe) | ||
| 86 | |||
| 87 | return sorted(recipelist) | ||
| 88 | |||
| 89 | def list_recipes(args): | ||
| 90 | import bb.tinfoil | ||
| 91 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 92 | tinfoil.logger.setLevel(logger.getEffectiveLevel()) | ||
| 93 | tinfoil.prepare(config_only=True) | ||
| 94 | recipelist = get_recipe_list(args.manifest, tinfoil) | ||
| 95 | for recipe in sorted(recipelist): | ||
| 96 | print('%s' % recipe) | ||
| 97 | |||
| 98 | def list_layers(args): | ||
| 99 | |||
| 100 | def find_git_repo(pth): | ||
| 101 | checkpth = pth | ||
| 102 | while checkpth != os.sep: | ||
| 103 | if os.path.exists(os.path.join(checkpth, '.git')): | ||
| 104 | return checkpth | ||
| 105 | checkpth = os.path.dirname(checkpth) | ||
| 106 | return None | ||
| 107 | |||
| 108 | def get_git_remote_branch(repodir): | ||
| 109 | try: | ||
| 110 | stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir) | ||
| 111 | except bb.process.ExecutionError as e: | ||
| 112 | stdout = None | ||
| 113 | if stdout: | ||
| 114 | return stdout.strip() | ||
| 115 | else: | ||
| 116 | return None | ||
| 117 | |||
| 118 | def get_git_head_commit(repodir): | ||
| 119 | try: | ||
| 120 | stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir) | ||
| 121 | except bb.process.ExecutionError as e: | ||
| 122 | stdout = None | ||
| 123 | if stdout: | ||
| 124 | return stdout.strip() | ||
| 125 | else: | ||
| 126 | return None | ||
| 127 | |||
| 128 | def get_git_repo_url(repodir, remote='origin'): | ||
| 129 | import bb.process | ||
| 130 | # Try to get upstream repo location from origin remote | ||
| 131 | try: | ||
| 132 | stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir) | ||
| 133 | except bb.process.ExecutionError as e: | ||
| 134 | stdout = None | ||
| 135 | if stdout: | ||
| 136 | for line in stdout.splitlines(): | ||
| 137 | splitline = line.split() | ||
| 138 | if len(splitline) > 1: | ||
| 139 | if splitline[0] == remote and scriptutils.is_src_url(splitline[1]): | ||
| 140 | return splitline[1] | ||
| 141 | return None | ||
| 142 | |||
| 143 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 144 | tinfoil.logger.setLevel(logger.getEffectiveLevel()) | ||
| 145 | tinfoil.prepare(config_only=False) | ||
| 146 | layers = OrderedDict() | ||
| 147 | for layerdir in tinfoil.config_data.getVar('BBLAYERS').split(): | ||
| 148 | layerdata = OrderedDict() | ||
| 149 | layername = os.path.basename(layerdir) | ||
| 150 | logger.debug('layername %s, layerdir %s' % (layername, layerdir)) | ||
| 151 | if layername in layers: | ||
| 152 | logger.warning('layername %s is not unique in configuration' % layername) | ||
| 153 | layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir) | ||
| 154 | logger.debug('trying layername %s' % layername) | ||
| 155 | if layername in layers: | ||
| 156 | logger.error('Layer name %s is not unique in configuration' % layername) | ||
| 157 | sys.exit(2) | ||
| 158 | repodir = find_git_repo(layerdir) | ||
| 159 | if repodir: | ||
| 160 | remotebranch = get_git_remote_branch(repodir) | ||
| 161 | remote = 'origin' | ||
| 162 | if remotebranch and '/' in remotebranch: | ||
| 163 | rbsplit = remotebranch.split('/', 1) | ||
| 164 | layerdata['actual_branch'] = rbsplit[1] | ||
| 165 | remote = rbsplit[0] | ||
| 166 | layerdata['vcs_url'] = get_git_repo_url(repodir, remote) | ||
| 167 | if os.path.abspath(repodir) != os.path.abspath(layerdir): | ||
| 168 | layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir) | ||
| 169 | commit = get_git_head_commit(repodir) | ||
| 170 | if commit: | ||
| 171 | layerdata['vcs_commit'] = commit | ||
| 172 | layers[layername] = layerdata | ||
| 173 | |||
| 174 | json.dump(layers, args.output, indent=2) | ||
| 175 | |||
| 176 | def get_recipe(args): | ||
| 177 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 178 | tinfoil.logger.setLevel(logger.getEffectiveLevel()) | ||
| 179 | tinfoil.prepare(config_only=True) | ||
| 180 | |||
| 181 | recipe = pkg2recipe(tinfoil, args.package) | ||
| 182 | print(' %s package provided by %s' % (args.package, recipe)) | ||
| 183 | |||
| 184 | def pkg_dependencies(args): | ||
| 185 | def get_recipe_info(tinfoil, recipe): | ||
| 186 | try: | ||
| 187 | info = tinfoil.get_recipe_info(recipe) | ||
| 188 | except Exception: | ||
| 189 | logger.error('Failed to get recipe info for: %s' % recipe) | ||
| 190 | sys.exit(1) | ||
| 191 | if not info: | ||
| 192 | logger.warning('No recipe info found for: %s' % recipe) | ||
| 193 | sys.exit(1) | ||
| 194 | append_files = tinfoil.get_file_appends(info.fn) | ||
| 195 | appends = True | ||
| 196 | data = tinfoil.parse_recipe_file(info.fn, appends, append_files) | ||
| 197 | data.pn = info.pn | ||
| 198 | data.pv = info.pv | ||
| 199 | return data | ||
| 200 | |||
| 201 | def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order): | ||
| 202 | spaces = ' ' * order | ||
| 203 | data = recipe_info[rn] | ||
| 204 | if args.native: | ||
| 205 | logger.debug('%s- %s' % (spaces, data.pn)) | ||
| 206 | elif "-native" not in data.pn: | ||
| 207 | if "cross" not in data.pn: | ||
| 208 | logger.debug('%s- %s' % (spaces, data.pn)) | ||
| 209 | |||
| 210 | depends = [] | ||
| 211 | for dep in data.depends: | ||
| 212 | if dep not in assume_provided: | ||
| 213 | depends.append(dep) | ||
| 214 | |||
| 215 | # First find all dependencies not in package list. | ||
| 216 | for dep in depends: | ||
| 217 | if dep not in packages: | ||
| 218 | packages.append(dep) | ||
| 219 | dep_data = get_recipe_info(tinfoil, dep) | ||
| 220 | # Do this once now to reduce the number of bitbake calls. | ||
| 221 | dep_data.depends = dep_data.getVar('DEPENDS').split() | ||
| 222 | recipe_info[dep] = dep_data | ||
| 223 | |||
| 224 | # Then recursively analyze all of the dependencies for the current recipe. | ||
| 225 | for dep in depends: | ||
| 226 | find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1) | ||
| 227 | |||
| 228 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 229 | tinfoil.logger.setLevel(logger.getEffectiveLevel()) | ||
| 230 | tinfoil.prepare() | ||
| 231 | |||
| 232 | assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split() | ||
| 233 | logger.debug('assumed provided:') | ||
| 234 | for ap in sorted(assume_provided): | ||
| 235 | logger.debug(' - %s' % ap) | ||
| 236 | |||
| 237 | recipe = pkg2recipe(tinfoil, args.package) | ||
| 238 | data = get_recipe_info(tinfoil, recipe) | ||
| 239 | data.depends = [] | ||
| 240 | depends = data.getVar('DEPENDS').split() | ||
| 241 | for dep in depends: | ||
| 242 | if dep not in assume_provided: | ||
| 243 | data.depends.append(dep) | ||
| 244 | |||
| 245 | recipe_info = dict([(recipe, data)]) | ||
| 246 | packages = [] | ||
| 247 | find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1) | ||
| 248 | |||
| 249 | print('\nThe following packages are required to build %s' % recipe) | ||
| 250 | for p in sorted(packages): | ||
| 251 | data = recipe_info[p] | ||
| 252 | if "-native" not in data.pn: | ||
| 253 | if "cross" not in data.pn: | ||
| 254 | print(" %s (%s)" % (data.pn,p)) | ||
| 255 | |||
| 256 | if args.native: | ||
| 257 | print('\nThe following native packages are required to build %s' % recipe) | ||
| 258 | for p in sorted(packages): | ||
| 259 | data = recipe_info[p] | ||
| 260 | if "-native" in data.pn: | ||
| 261 | print(" %s(%s)" % (data.pn,p)) | ||
| 262 | if "cross" in data.pn: | ||
| 263 | print(" %s(%s)" % (data.pn,p)) | ||
| 264 | |||
| 265 | def default_config(): | ||
| 266 | vlist = OrderedDict() | ||
| 267 | vlist['PV'] = 'yes' | ||
| 268 | vlist['SUMMARY'] = 'no' | ||
| 269 | vlist['DESCRIPTION'] = 'no' | ||
| 270 | vlist['SECTION'] = 'no' | ||
| 271 | vlist['LICENSE'] = 'yes' | ||
| 272 | vlist['HOMEPAGE'] = 'no' | ||
| 273 | vlist['BUGTRACKER'] = 'no' | ||
| 274 | vlist['PROVIDES'] = 'no' | ||
| 275 | vlist['BBCLASSEXTEND'] = 'no' | ||
| 276 | vlist['DEPENDS'] = 'no' | ||
| 277 | vlist['PACKAGECONFIG'] = 'no' | ||
| 278 | vlist['SRC_URI'] = 'yes' | ||
| 279 | vlist['SRCREV'] = 'yes' | ||
| 280 | vlist['EXTRA_OECONF'] = 'no' | ||
| 281 | vlist['EXTRA_OESCONS'] = 'no' | ||
| 282 | vlist['EXTRA_OECMAKE'] = 'no' | ||
| 283 | vlist['EXTRA_OEMESON'] = 'no' | ||
| 284 | |||
| 285 | clist = OrderedDict() | ||
| 286 | clist['variables'] = vlist | ||
| 287 | clist['filepath'] = 'no' | ||
| 288 | clist['sha256sum'] = 'no' | ||
| 289 | clist['layerdir'] = 'no' | ||
| 290 | clist['layer'] = 'no' | ||
| 291 | clist['inherits'] = 'no' | ||
| 292 | clist['source_urls'] = 'no' | ||
| 293 | clist['packageconfig_opts'] = 'no' | ||
| 294 | clist['patches'] = 'no' | ||
| 295 | clist['packagedir'] = 'no' | ||
| 296 | return clist | ||
| 297 | |||
| 298 | def dump_config(args): | ||
| 299 | config = default_config() | ||
| 300 | f = open('default_config.json', 'w') | ||
| 301 | json.dump(config, f, indent=2) | ||
| 302 | logger.info('Default config list dumped to default_config.json') | ||
| 303 | |||
| 304 | def export_manifest_info(args): | ||
| 305 | |||
| 306 | def handle_value(value): | ||
| 307 | if value: | ||
| 308 | return oe.utils.squashspaces(value) | ||
| 309 | else: | ||
| 310 | return value | ||
| 311 | |||
| 312 | if args.config: | ||
| 313 | logger.debug('config: %s' % args.config) | ||
| 314 | f = open(args.config, 'r') | ||
| 315 | config = json.load(f, object_pairs_hook=OrderedDict) | ||
| 316 | else: | ||
| 317 | config = default_config() | ||
| 318 | if logger.isEnabledFor(logging.DEBUG): | ||
| 319 | print('Configuration:') | ||
| 320 | json.dump(config, sys.stdout, indent=2) | ||
| 321 | print('') | ||
| 322 | |||
| 323 | tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-') | ||
| 324 | logger.debug('tmp dir: %s' % tmpoutdir) | ||
| 325 | |||
| 326 | # export manifest | ||
| 327 | shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest")) | ||
| 328 | |||
| 329 | with bb.tinfoil.Tinfoil(tracking=True) as tinfoil: | ||
| 330 | tinfoil.logger.setLevel(logger.getEffectiveLevel()) | ||
| 331 | tinfoil.prepare(config_only=False) | ||
| 332 | |||
| 333 | pkglist = get_pkg_list(args.manifest) | ||
| 334 | # export pkg list | ||
| 335 | f = open(os.path.join(tmpoutdir, "pkgs"), 'w') | ||
| 336 | for pkg in pkglist: | ||
| 337 | f.write('%s\n' % pkg) | ||
| 338 | f.close() | ||
| 339 | |||
| 340 | recipelist = [] | ||
| 341 | for pkg in pkglist: | ||
| 342 | recipe = pkg2recipe(tinfoil,pkg) | ||
| 343 | if recipe: | ||
| 344 | if not recipe in recipelist: | ||
| 345 | recipelist.append(recipe) | ||
| 346 | recipelist.sort() | ||
| 347 | # export recipe list | ||
| 348 | f = open(os.path.join(tmpoutdir, "recipes"), 'w') | ||
| 349 | for recipe in recipelist: | ||
| 350 | f.write('%s\n' % recipe) | ||
| 351 | f.close() | ||
| 352 | |||
| 353 | try: | ||
| 354 | rvalues = OrderedDict() | ||
| 355 | for pn in sorted(recipelist): | ||
| 356 | logger.debug('Package: %s' % pn) | ||
| 357 | rd = tinfoil.parse_recipe(pn) | ||
| 358 | |||
| 359 | rvalues[pn] = OrderedDict() | ||
| 360 | |||
| 361 | for varname in config['variables']: | ||
| 362 | if config['variables'][varname] == 'yes': | ||
| 363 | rvalues[pn][varname] = handle_value(rd.getVar(varname)) | ||
| 364 | |||
| 365 | fpth = rd.getVar('FILE') | ||
| 366 | layerdir = oe.recipeutils.find_layerdir(fpth) | ||
| 367 | if config['filepath'] == 'yes': | ||
| 368 | rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir) | ||
| 369 | if config['sha256sum'] == 'yes': | ||
| 370 | rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth) | ||
| 371 | |||
| 372 | if config['layerdir'] == 'yes': | ||
| 373 | rvalues[pn]['layerdir'] = layerdir | ||
| 374 | |||
| 375 | if config['layer'] == 'yes': | ||
| 376 | rvalues[pn]['layer'] = os.path.basename(layerdir) | ||
| 377 | |||
| 378 | if config['inherits'] == 'yes': | ||
| 379 | gr = set(tinfoil.config_data.getVar("__inherit_cache") or []) | ||
| 380 | lr = set(rd.getVar("__inherit_cache") or []) | ||
| 381 | rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr}) | ||
| 382 | |||
| 383 | if config['source_urls'] == 'yes': | ||
| 384 | rvalues[pn]['source_urls'] = [] | ||
| 385 | for url in (rd.getVar('SRC_URI') or '').split(): | ||
| 386 | if not url.startswith('file://'): | ||
| 387 | url = url.split(';')[0] | ||
| 388 | rvalues[pn]['source_urls'].append(url) | ||
| 389 | |||
| 390 | if config['packageconfig_opts'] == 'yes': | ||
| 391 | rvalues[pn]['packageconfig_opts'] = OrderedDict() | ||
| 392 | for key in rd.getVarFlags('PACKAGECONFIG').keys(): | ||
| 393 | if key == 'doc': | ||
| 394 | continue | ||
| 395 | rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key) | ||
| 396 | |||
| 397 | if config['patches'] == 'yes': | ||
| 398 | patches = oe.recipeutils.get_recipe_patches(rd) | ||
| 399 | rvalues[pn]['patches'] = [] | ||
| 400 | if patches: | ||
| 401 | recipeoutdir = os.path.join(tmpoutdir, pn, 'patches') | ||
| 402 | bb.utils.mkdirhier(recipeoutdir) | ||
| 403 | for patch in patches: | ||
| 404 | # Patches may be in other layers too | ||
| 405 | patchlayerdir = oe.recipeutils.find_layerdir(patch) | ||
| 406 | # patchlayerdir will be None for remote patches, which we ignore | ||
| 407 | # (since currently they are considered as part of sources) | ||
| 408 | if patchlayerdir: | ||
| 409 | rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir))) | ||
| 410 | shutil.copy(patch, recipeoutdir) | ||
| 411 | |||
| 412 | if config['packagedir'] == 'yes': | ||
| 413 | pn_dir = os.path.join(tmpoutdir, pn) | ||
| 414 | bb.utils.mkdirhier(pn_dir) | ||
| 415 | f = open(os.path.join(pn_dir, 'recipe.json'), 'w') | ||
| 416 | json.dump(rvalues[pn], f, indent=2) | ||
| 417 | f.close() | ||
| 418 | |||
| 419 | with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f: | ||
| 420 | json.dump(rvalues, f, indent=2) | ||
| 421 | |||
| 422 | if args.output: | ||
| 423 | outname = os.path.basename(args.output) | ||
| 424 | else: | ||
| 425 | outname = os.path.splitext(os.path.basename(args.manifest))[0] | ||
| 426 | if outname.endswith('.tar.gz'): | ||
| 427 | outname = outname[:-7] | ||
| 428 | elif outname.endswith('.tgz'): | ||
| 429 | outname = outname[:-4] | ||
| 430 | |||
| 431 | tarfn = outname | ||
| 432 | if tarfn.endswith(os.sep): | ||
| 433 | tarfn = tarfn[:-1] | ||
| 434 | if not tarfn.endswith(('.tar.gz', '.tgz')): | ||
| 435 | tarfn += '.tar.gz' | ||
| 436 | with open(tarfn, 'wb') as f: | ||
| 437 | with tarfile.open(None, "w:gz", f) as tar: | ||
| 438 | tar.add(tmpoutdir, outname) | ||
| 439 | finally: | ||
| 440 | shutil.rmtree(tmpoutdir) | ||
| 441 | |||
| 442 | |||
| 443 | def main(): | ||
| 444 | parser = argparse_oe.ArgumentParser(description="Image manifest utility", | ||
| 445 | epilog="Use %(prog)s <subcommand> --help to get help on a specific command") | ||
| 446 | parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true') | ||
| 447 | parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true') | ||
| 448 | subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>') | ||
| 449 | subparsers.required = True | ||
| 450 | |||
| 451 | # get recipe info | ||
| 452 | parser_get_recipes = subparsers.add_parser('recipe-info', | ||
| 453 | help='Get recipe info', | ||
| 454 | description='Get recipe information for a package') | ||
| 455 | parser_get_recipes.add_argument('package', help='Package name') | ||
| 456 | parser_get_recipes.set_defaults(func=get_recipe) | ||
| 457 | |||
| 458 | # list runtime dependencies | ||
| 459 | parser_pkg_dep = subparsers.add_parser('list-depends', | ||
| 460 | help='List dependencies', | ||
| 461 | description='List dependencies required to build the package') | ||
| 462 | parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true') | ||
| 463 | parser_pkg_dep.add_argument('package', help='Package name') | ||
| 464 | parser_pkg_dep.set_defaults(func=pkg_dependencies) | ||
| 465 | |||
| 466 | # list recipes | ||
| 467 | parser_recipes = subparsers.add_parser('list-recipes', | ||
| 468 | help='List recipes producing packages within an image', | ||
| 469 | description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata') | ||
| 470 | parser_recipes.add_argument('manifest', help='Manifest file') | ||
| 471 | parser_recipes.set_defaults(func=list_recipes) | ||
| 472 | |||
| 473 | # list packages | ||
| 474 | parser_packages = subparsers.add_parser('list-packages', | ||
| 475 | help='List packages within an image', | ||
| 476 | description='Lists packages that went into an image, using the manifest') | ||
| 477 | parser_packages.add_argument('manifest', help='Manifest file') | ||
| 478 | parser_packages.set_defaults(func=list_packages) | ||
| 479 | |||
| 480 | # list layers | ||
| 481 | parser_layers = subparsers.add_parser('list-layers', | ||
| 482 | help='List included layers', | ||
| 483 | description='Lists included layers') | ||
| 484 | parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified', | ||
| 485 | default=sys.stdout, type=argparse.FileType('w')) | ||
| 486 | parser_layers.set_defaults(func=list_layers) | ||
| 487 | |||
| 488 | # dump default configuration file | ||
| 489 | parser_dconfig = subparsers.add_parser('dump-config', | ||
| 490 | help='Dump default config', | ||
| 491 | description='Dump default config to default_config.json') | ||
| 492 | parser_dconfig.set_defaults(func=dump_config) | ||
| 493 | |||
| 494 | # export recipe info for packages in manifest | ||
| 495 | parser_export = subparsers.add_parser('manifest-info', | ||
| 496 | help='Export recipe info for a manifest', | ||
| 497 | description='Export recipe information using the manifest') | ||
| 498 | parser_export.add_argument('-c', '--config', help='load config from json file') | ||
| 499 | parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified') | ||
| 500 | parser_export.add_argument('manifest', help='Manifest file') | ||
| 501 | parser_export.set_defaults(func=export_manifest_info) | ||
| 502 | |||
| 503 | args = parser.parse_args() | ||
| 504 | |||
| 505 | if args.debug: | ||
| 506 | logger.setLevel(logging.DEBUG) | ||
| 507 | logger.debug("Debug Enabled") | ||
| 508 | elif args.quiet: | ||
| 509 | logger.setLevel(logging.ERROR) | ||
| 510 | |||
| 511 | ret = args.func(args) | ||
| 512 | |||
| 513 | return ret | ||
| 514 | |||
| 515 | |||
| 516 | if __name__ == "__main__": | ||
| 517 | try: | ||
| 518 | ret = main() | ||
| 519 | except Exception: | ||
| 520 | ret = 1 | ||
| 521 | import traceback | ||
| 522 | traceback.print_exc() | ||
| 523 | sys.exit(ret) | ||
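get_pkg_list() above accepts two input formats: image manifest rows with three fields (package, architecture, version) and build-dependency files with one package per line. A small sketch of that parsing on invented manifest content:

    def get_pkg_list_from_lines(lines):
        # Mirrors get_pkg_list(): keep the first field of 3-field manifest
        # rows and of 1-field build-dependency rows.
        pkglist = []
        for line in lines:
            linesplit = line.split()
            if len(linesplit) in (1, 3):
                pkglist.append(linesplit[0])
        return sorted(pkglist)

    sample = ["busybox qemux86_64 1.35.0", "base-files qemux86_64 3.0.14"]
    print(get_pkg_list_from_lines(sample))  # -> ['base-files', 'busybox']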
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py new file mode 100755 index 0000000000..5c39df05a5 --- /dev/null +++ b/scripts/contrib/improve_kernel_cve_report.py | |||
| @@ -0,0 +1,473 @@ | |||
| 1 | #! /usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 5 | # The script uses another source of CVE information from linux-vulns | ||
| 6 | # to enrich the cve-summary from cve-check or vex. | ||
| 7 | # It can also use the list of compiled files from the kernel SPDX output to ignore | ||
| 8 | # CVEs whose affected source files are not compiled into the kernel. | ||
| 9 | # | ||
| 10 | # It creates a new json file with updated CVE information | ||
| 11 | # | ||
| 12 | # Compiled files can be extracted adding the following in local.conf | ||
| 13 | # SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1" | ||
| 14 | # | ||
| 15 | # Tested with the following CVE sources: | ||
| 16 | # - https://git.kernel.org/pub/scm/linux/security/vulns.git | ||
| 17 | # - https://github.com/CVEProject/cvelistV5 | ||
| 18 | # | ||
| 19 | # Example: | ||
| 20 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns | ||
| 21 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json | ||
| 22 | # | ||
| 23 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 24 | |||
| 25 | import argparse | ||
| 26 | import json | ||
| 27 | import sys | ||
| 28 | import logging | ||
| 29 | import glob | ||
| 30 | import os | ||
| 31 | import pathlib | ||
| 32 | from packaging.version import Version | ||
| 33 | |||
| 34 | def is_linux_cve(cve_info): | ||
| 35 | '''Return True if the CVE belongs to Linux''' | ||
| 36 | if not "affected" in cve_info["containers"]["cna"]: | ||
| 37 | return False | ||
| 38 | for affected in cve_info["containers"]["cna"]["affected"]: | ||
| 39 | if not "product" in affected: | ||
| 40 | return False | ||
| 41 | if affected["product"] == "Linux" and affected["vendor"] == "Linux": | ||
| 42 | return True | ||
| 43 | return False | ||
| 44 | |||
| 45 | def get_kernel_cves(datadir, compiled_files, version): | ||
| 46 | """ | ||
| 47 | Get CVEs for the kernel | ||
| 48 | """ | ||
| 49 | cves = {} | ||
| 50 | |||
| 51 | check_config = len(compiled_files) > 0 | ||
| 52 | |||
| 53 | base_version = Version(f"{version.major}.{version.minor}") | ||
| 54 | |||
| 55 | # Check all CVES from kernel vulns | ||
| 56 | pattern = os.path.join(datadir, '**', "CVE-*.json") | ||
| 57 | cve_files = glob.glob(pattern, recursive=True) | ||
| 58 | not_applicable_config = 0 | ||
| 59 | fixed_as_later_backport = 0 | ||
| 60 | vulnerable = 0 | ||
| 61 | not_vulnerable = 0 | ||
| 62 | for cve_file in sorted(cve_files): | ||
| 63 | cve_info = {} | ||
| 64 | with open(cve_file, "r", encoding='ISO-8859-1') as f: | ||
| 65 | cve_info = json.load(f) | ||
| 66 | |||
| 67 | if len(cve_info) == 0: | ||
| 68 | logging.error("Not valid data in %s. Aborting", cve_file) | ||
| 69 | break | ||
| 70 | |||
| 71 | if not is_linux_cve(cve_info): | ||
| 72 | continue | ||
| 73 | cve_id = os.path.basename(cve_file)[:-5] | ||
| 74 | description = cve_info["containers"]["cna"]["descriptions"][0]["value"] | ||
| 75 | if cve_file.find("rejected") >= 0: | ||
| 76 | logging.debug("%s is rejected by the CNA", cve_id) | ||
| 77 | cves[cve_id] = { | ||
| 78 | "id": cve_id, | ||
| 79 | "status": "Ignored", | ||
| 80 | "detail": "rejected", | ||
| 81 | "summary": description, | ||
| 82 | "description": f"Rejected by CNA" | ||
| 83 | } | ||
| 84 | continue | ||
| 85 | if any(elem in cve_file for elem in ["review", "reserved", "testing"]): | ||
| 86 | continue | ||
| 87 | |||
| 88 | is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version) | ||
| 89 | |||
| 90 | logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected) | ||
| 91 | |||
| 92 | if is_vulnerable is None: | ||
| 93 | logging.warning("%s doesn't have good metadata", cve_id) | ||
| 94 | if is_vulnerable: | ||
| 95 | is_affected = True | ||
| 96 | affected_files = [] | ||
| 97 | if check_config: | ||
| 98 | is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info) | ||
| 99 | |||
| 100 | if not is_affected and len(affected_files) > 0: | ||
| 101 | logging.debug( | ||
| 102 | "%s - not applicable configuration since affected files not compiled: %s", | ||
| 103 | cve_id, affected_files) | ||
| 104 | cves[cve_id] = { | ||
| 105 | "id": cve_id, | ||
| 106 | "status": "Ignored", | ||
| 107 | "detail": "not-applicable-config", | ||
| 108 | "summary": description, | ||
| 109 | "description": f"Source code not compiled by config. {affected_files}" | ||
| 110 | } | ||
| 111 | not_applicable_config += 1 | ||
| 112 | # Check if we have backport | ||
| 113 | else: | ||
| 114 | if not better_match_last: | ||
| 115 | fixed_in = last_affected | ||
| 116 | else: | ||
| 117 | fixed_in = better_match_last | ||
| 118 | logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in) | ||
| 119 | cves[cve_id] = { | ||
| 120 | "id": cve_id, | ||
| 121 | "status": "Unpatched", | ||
| 122 | "detail": "version-in-range", | ||
| 123 | "summary": description, | ||
| 124 | "description": f"Needs backporting (fixed from {fixed_in})" | ||
| 125 | } | ||
| 126 | vulnerable += 1 | ||
| 127 | if (better_match_last and | ||
| 128 | Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version): | ||
| 129 | fixed_as_later_backport += 1 | ||
| 130 | # Not vulnerable | ||
| 131 | else: | ||
| 132 | if not first_affected: | ||
| 133 | logging.debug("%s - not known affected %s", | ||
| 134 | cve_id, | ||
| 135 | better_match_last) | ||
| 136 | cves[cve_id] = { | ||
| 137 | "id": cve_id, | ||
| 138 | "status": "Patched", | ||
| 139 | "detail": "version-not-in-range", | ||
| 140 | "summary": description, | ||
| 141 | "description": "No CPE match" | ||
| 142 | } | ||
| 143 | not_vulnerable += 1 | ||
| 144 | continue | ||
| 145 | backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}") | ||
| 146 | if version < first_affected: | ||
| 147 | logging.debug('%s - fixed-version: only affects %s onwards', | ||
| 148 | cve_id, | ||
| 149 | first_affected) | ||
| 150 | cves[cve_id] = { | ||
| 151 | "id": cve_id, | ||
| 152 | "status": "Patched", | ||
| 153 | "detail": "fixed-version", | ||
| 154 | "summary": description, | ||
| 155 | "description": f"only affects {first_affected} onwards" | ||
| 156 | } | ||
| 157 | not_vulnerable += 1 | ||
| 158 | elif last_affected <= version: | ||
| 159 | logging.debug("%s - fixed-version: Fixed from version %s", | ||
| 160 | cve_id, | ||
| 161 | last_affected) | ||
| 162 | cves[cve_id] = { | ||
| 163 | "id": cve_id, | ||
| 164 | "status": "Patched", | ||
| 165 | "detail": "fixed-version", | ||
| 166 | "summary": description, | ||
| 167 | "description": f"fixed-version: Fixed from version {last_affected}" | ||
| 168 | } | ||
| 169 | not_vulnerable += 1 | ||
| 170 | elif backport_base == base_version: | ||
| 171 | logging.debug("%s - cpe-stable-backport: Backported in %s", | ||
| 172 | cve_id, | ||
| 173 | better_match_last) | ||
| 174 | cves[cve_id] = { | ||
| 175 | "id": cve_id, | ||
| 176 | "status": "Patched", | ||
| 177 | "detail": "cpe-stable-backport", | ||
| 178 | "summary": description, | ||
| 179 | "description": f"Backported in {better_match_last}" | ||
| 180 | } | ||
| 181 | not_vulnerable += 1 | ||
| 182 | else: | ||
| 183 | logging.debug("%s - version not affected %s", cve_id, str(affected_versions)) | ||
| 184 | cves[cve_id] = { | ||
| 185 | "id": cve_id, | ||
| 186 | "status": "Patched", | ||
| 187 | "detail": "version-not-in-range", | ||
| 188 | "summary": description, | ||
| 189 | "description": f"Range {affected_versions}" | ||
| 190 | } | ||
| 191 | not_vulnerable += 1 | ||
| 192 | |||
| 193 | logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config) | ||
| 194 | logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable) | ||
| 195 | logging.info("Total vulnerable CVEs: %d", vulnerable) | ||
| 196 | |||
| 197 | logging.info("Total CVEs already backported in %s: %s", base_version, | ||
| 198 | fixed_as_later_backport) | ||
| 199 | return cves | ||
| 200 | |||
| 201 | def read_spdx(spdx_file): | ||
| 202 | '''Open SPDX file and extract compiled files''' | ||
| 203 | with open(spdx_file, 'r', encoding='ISO-8859-1') as f: | ||
| 204 | spdx = json.load(f) | ||
| 205 | if "spdxVersion" in spdx: | ||
| 206 | if spdx["spdxVersion"] == "SPDX-2.2": | ||
| 207 | return read_spdx2(spdx) | ||
| 208 | if "@graph" in spdx: | ||
| 209 | return read_spdx3(spdx) | ||
| 210 | return [] | ||
| 211 | |||
| 212 | def read_spdx2(spdx): | ||
| 213 | ''' | ||
| 214 | Read spdx2 compiled files from spdx | ||
| 215 | ''' | ||
| 216 | cfiles = set() | ||
| 217 | if 'files' not in spdx: | ||
| 218 | return cfiles | ||
| 219 | for item in spdx['files']: | ||
| 220 | for ftype in item['fileTypes']: | ||
| 221 | if ftype == "SOURCE": | ||
| 222 | filename = item["fileName"][item["fileName"].find("/")+1:] | ||
| 223 | cfiles.add(filename) | ||
| 224 | return cfiles | ||
| 225 | |||
| 226 | def read_spdx3(spdx): | ||
| 227 | ''' | ||
| 228 | Read spdx3 compiled files from spdx | ||
| 229 | ''' | ||
| 230 | cfiles = set() | ||
| 231 | for item in spdx["@graph"]: | ||
| 232 | if "software_primaryPurpose" not in item: | ||
| 233 | continue | ||
| 234 | if item["software_primaryPurpose"] == "source": | ||
| 235 | filename = item['name'][item['name'].find("/")+1:] | ||
| 236 | cfiles.add(filename) | ||
| 237 | return cfiles | ||
| 238 | |||
| 239 | def check_kernel_compiled_files(compiled_files, cve_info): | ||
| 240 | """ | ||
| 241 | Return whether a CVE affects us, based on the compiled files | ||
| 242 | """ | ||
| 243 | files_affected = set() | ||
| 244 | is_affected = False | ||
| 245 | |||
| 246 | for item in cve_info['containers']['cna']['affected']: | ||
| 247 | if "programFiles" in item: | ||
| 248 | for f in item['programFiles']: | ||
| 249 | if f not in files_affected: | ||
| 250 | files_affected.add(f) | ||
| 251 | |||
| 252 | if len(files_affected) > 0: | ||
| 253 | for f in files_affected: | ||
| 254 | if f in compiled_files: | ||
| 255 | logging.debug("File match: %s", f) | ||
| 256 | is_affected = True | ||
| 257 | return is_affected, files_affected | ||
| 258 | |||
| 259 | def get_cpe_applicability(cve_info, v): | ||
| 260 | ''' | ||
| 261 | Check if version is affected and return affected versions | ||
| 262 | ''' | ||
| 263 | base_branch = Version(f"{v.major}.{v.minor}") | ||
| 264 | affected = [] | ||
| 265 | if 'cpeApplicability' not in cve_info["containers"]["cna"]: | ||
| 266 | return None, None, None, None, None, None | ||
| 267 | |||
| 268 | for nodes in cve_info["containers"]["cna"]["cpeApplicability"]: | ||
| 269 | for node in nodes.values(): | ||
| 270 | vulnerable = False | ||
| 271 | matched_branch = False | ||
| 272 | first_affected = Version("5000") | ||
| 273 | last_affected = Version("0") | ||
| 274 | better_match_first = Version("0") | ||
| 275 | better_match_last = Version("5000") | ||
| 276 | |||
| 277 | if len(node[0]['cpeMatch']) == 0: | ||
| 278 | first_affected = None | ||
| 279 | last_affected = None | ||
| 280 | better_match_first = None | ||
| 281 | better_match_last = None | ||
| 282 | |||
| 283 | for cpe_match in node[0]['cpeMatch']: | ||
| 284 | version_start_including = Version("0") | ||
| 285 | version_end_excluding = Version("0") | ||
| 286 | if 'versionStartIncluding' in cpe_match: | ||
| 287 | version_start_including = Version(cpe_match['versionStartIncluding']) | ||
| 288 | else: | ||
| 289 | version_start_including = Version("0") | ||
| 291 | if "versionEndExcluding" in cpe_match: | ||
| 292 | version_end_excluding = Version(cpe_match["versionEndExcluding"]) | ||
| 293 | else: | ||
| 294 | # versionEndExcluding is missing, so this branch has no fix yet; use a fake high bound | ||
| 295 | version_end_excluding = Version( | ||
| 296 | f"{version_start_including.major}.{version_start_including.minor}.5000" | ||
| 297 | ) | ||
| 298 | affected.append(f" {version_start_including}-{version_end_excluding}") | ||
| 299 | # Detect if the fix (versionEnd) lands in the base branch. It takes precedence over the rest | ||
| 300 | branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}") | ||
| 301 | if branch_end == base_branch: | ||
| 302 | if version_start_including <= v < version_end_excluding: | ||
| 303 | vulnerable = cpe_match['vulnerable'] | ||
| 304 | # If we don't match in our branch, we are not vulnerable, | ||
| 305 | # since we have a backport | ||
| 306 | matched_branch = True | ||
| 307 | better_match_first = version_start_including | ||
| 308 | better_match_last = version_end_excluding | ||
| 309 | if version_start_including <= v < version_end_excluding and not matched_branch: | ||
| 310 | if version_end_excluding < better_match_last: | ||
| 311 | better_match_first = max(version_start_including, better_match_first) | ||
| 312 | better_match_last = min(better_match_last, version_end_excluding) | ||
| 313 | vulnerable = cpe_match['vulnerable'] | ||
| 314 | matched_branch = True | ||
| 315 | |||
| 316 | first_affected = min(version_start_including, first_affected) | ||
| 317 | last_affected = max(version_end_excluding, last_affected) | ||
| 318 | # Not a better match, we use the first and last affected instead of the fake .5000 | ||
| 319 | if vulnerable and better_match_last == Version(f"{base_branch}.5000"): | ||
| 320 | better_match_last = last_affected | ||
| 321 | better_match_first = first_affected | ||
| 322 | return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected | ||
| 323 | |||
| 324 | def copy_data(old, new): | ||
| 325 | '''Update dictionary with new entries, while keeping the old ones''' | ||
| 326 | for k in new.keys(): | ||
| 327 | old[k] = new[k] | ||
| 328 | return old | ||
| 329 | |||
| 330 | # Function taken from cve_check.bbclass. Adapted to cve fields | ||
| 331 | def cve_update(cve_data, cve, entry): | ||
| 332 | # If no entry, just add it | ||
| 333 | if cve not in cve_data: | ||
| 334 | cve_data[cve] = entry | ||
| 335 | return | ||
| 336 | # If we are updating, there might be a change in the status | ||
| 337 | if cve_data[cve]['status'] == "Unknown": | ||
| 338 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 339 | return | ||
| 340 | if cve_data[cve]['status'] == entry['status']: | ||
| 341 | return | ||
| 342 | if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched": | ||
| 343 | # Backported-patch (e.g. vendor kernel repo with cherry-picked CVE patch) | ||
| 344 | # has priority over Unpatched from the CNA | ||
| 345 | if cve_data[cve]['detail'] == "backported-patch": | ||
| 346 | return | ||
| 347 | logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve) | ||
| 348 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 349 | return | ||
| 350 | if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched": | ||
| 351 | logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve) | ||
| 352 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 353 | return | ||
| 354 | # If we have an "Ignored", it takes priority | ||
| 355 | if cve_data[cve]['status'] == "Ignored": | ||
| 356 | logging.debug("CVE %s not updating because Ignored", cve) | ||
| 357 | return | ||
| 358 | # An incoming "Ignored" also takes priority over the existing status | ||
| 359 | if entry['status'] == "Ignored": | ||
| 360 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 361 | logging.debug("CVE entry %s updated to Ignored", cve) | ||
| 362 | return | ||
| 363 | logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s", | ||
| 364 | cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail']) | ||
| 365 | |||
| 366 | def main(): | ||
| 367 | parser = argparse.ArgumentParser( | ||
| 368 | description="Update cve-summary with kernel compiled files and kernel CVE information" | ||
| 369 | ) | ||
| 370 | parser.add_argument( | ||
| 371 | "-s", | ||
| 372 | "--spdx", | ||
| 373 | help="SPDX2/3 for the kernel. Needs to include compiled sources", | ||
| 374 | ) | ||
| 375 | parser.add_argument( | ||
| 376 | "--datadir", | ||
| 377 | type=pathlib.Path, | ||
| 378 | help="Directory where CVE data is", | ||
| 379 | required=True | ||
| 380 | ) | ||
| 381 | parser.add_argument( | ||
| 382 | "--old-cve-report", | ||
| 383 | help="CVE report to update. (Optional)", | ||
| 384 | ) | ||
| 385 | parser.add_argument( | ||
| 386 | "--kernel-version", | ||
| 387 | help="Kernel version. Needed if old cve_report is not provided (Optional)", | ||
| 388 | type=Version | ||
| 389 | ) | ||
| 390 | parser.add_argument( | ||
| 391 | "--new-cve-report", | ||
| 392 | help="Output file", | ||
| 393 | default="cve-summary-enhance.json" | ||
| 394 | ) | ||
| 395 | parser.add_argument( | ||
| 396 | "-D", | ||
| 397 | "--debug", | ||
| 398 | help='Enable debug logging', | ||
| 399 | action="store_true") | ||
| 400 | |||
| 401 | args = parser.parse_args() | ||
| 402 | |||
| 403 | if args.debug: | ||
| 404 | log_level = logging.DEBUG | ||
| 405 | else: | ||
| 406 | log_level = logging.INFO | ||
| 407 | logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level) | ||
| 408 | |||
| 409 | if not args.kernel_version and not args.old_cve_report: | ||
| 410 | parser.error("either --kernel-version or --old-cve-report are needed") | ||
| 411 | return -1 | ||
| 412 | |||
| 413 | # by default we don't check the compiled files, unless provided | ||
| 414 | compiled_files = [] | ||
| 415 | if args.spdx: | ||
| 416 | compiled_files = read_spdx(args.spdx) | ||
| 417 | logging.info("Total compiled files %d", len(compiled_files)) | ||
| 418 | |||
| 419 | if args.old_cve_report: | ||
| 420 | with open(args.old_cve_report, encoding='ISO-8859-1') as f: | ||
| 421 | cve_report = json.load(f) | ||
| 422 | else: | ||
| 423 | # If a summary is not provided, we create one | ||
| 424 | cve_report = { | ||
| 425 | "version": "1", | ||
| 426 | "package": [ | ||
| 427 | { | ||
| 428 | "name": "linux-yocto", | ||
| 429 | "version": str(args.kernel_version), | ||
| 430 | "products": [ | ||
| 431 | { | ||
| 432 | "product": "linux_kernel", | ||
| 433 | "cvesInRecord": "Yes" | ||
| 434 | } | ||
| 435 | ], | ||
| 436 | "issue": [] | ||
| 437 | } | ||
| 438 | ] | ||
| 439 | } | ||
| 440 | |||
| 441 | for pkg in cve_report['package']: | ||
| 442 | is_kernel = False | ||
| 443 | for product in pkg['products']: | ||
| 444 | if product['product'] == "linux_kernel": | ||
| 445 | is_kernel = True | ||
| 446 | if not is_kernel: | ||
| 447 | continue | ||
| 448 | # Strip custom version suffixes after "-" | ||
| 449 | upstream_version = Version(pkg["version"].split("-")[0]) | ||
| 450 | logging.info("Checking kernel %s", upstream_version) | ||
| 451 | kernel_cves = get_kernel_cves(args.datadir, | ||
| 452 | compiled_files, | ||
| 453 | upstream_version) | ||
| 454 | logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves)) | ||
| 455 | cves = {issue["id"]: issue for issue in pkg["issue"]} | ||
| 456 | logging.info("Total kernel before processing cves: %s", len(cves)) | ||
| 457 | |||
| 458 | for cve in kernel_cves: | ||
| 459 | cve_update(cves, cve, kernel_cves[cve]) | ||
| 460 | |||
| 461 | pkg["issue"] = [] | ||
| 462 | for cve in sorted(cves): | ||
| 463 | pkg["issue"].extend([cves[cve]]) | ||
| 464 | logging.info("Total kernel cves after processing: %s", len(pkg['issue'])) | ||
| 465 | |||
| 466 | with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f: | ||
| 467 | json.dump(cve_report, f, indent=2) | ||
| 468 | |||
| 469 | return 0 | ||
| 470 | |||
| 471 | if __name__ == "__main__": | ||
| 472 | sys.exit(main()) | ||
| 473 | |||
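The trickiest logic in this new script is get_cpe_applicability(): each cpeMatch entry describes a half-open affected range [versionStartIncluding, versionEndExcluding), a missing versionEndExcluding means the branch has no fix yet, and a match whose fix lands in the same major.minor branch as the queried kernel takes precedence because it indicates a stable backport. A minimal sketch of the core range test, using hypothetical cpeMatch data rather than real CVE records:

```python
from packaging.version import Version

# Hypothetical cpeMatch entries, shaped like the kernel CNA JSON above
cpe_matches = [
    {"vulnerable": True, "versionStartIncluding": "6.10", "versionEndExcluding": "6.12.10"},
    {"vulnerable": True, "versionStartIncluding": "6.13", "versionEndExcluding": "6.14"},
]

def affected(v, matches):
    """Return True if kernel version v falls inside any affected range."""
    for m in matches:
        start = Version(m.get("versionStartIncluding", "0"))
        # A missing versionEndExcluding would mean an unfixed branch; the
        # real script substitutes a fake high bound such as X.Y.5000
        end = Version(m.get("versionEndExcluding",
                            f"{start.major}.{start.minor}.5000"))
        if m["vulnerable"] and start <= v < end:
            return True
    return False

print(affected(Version("6.12.5"), cpe_matches))   # True: inside 6.10 - 6.12.10
print(affected(Version("6.12.27"), cpe_matches))  # False: 6.12 branch fixed in 6.12.10
```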
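cve_update() then merges these verdicts into an existing cve-summary with explicit priorities: equal statuses are left alone, "Ignored" always wins, and a vendor backport recorded as backported-patch survives an Unpatched verdict from the CNA. A hypothetical merge, assuming the functions above are importable (the CVE id is made up):

```python
# Existing entry from an old cve-summary.json (hypothetical CVE id)
cve_data = {"CVE-2024-99999": {"id": "CVE-2024-99999",
                               "status": "Unpatched",
                               "detail": "version-in-range"}}

# New verdict from the kernel CNA data: the kernel config makes it inapplicable
cve_update(cve_data, "CVE-2024-99999",
           {"id": "CVE-2024-99999",
            "status": "Ignored",
            "detail": "not-applicable-config"})

# The incoming "Ignored" takes priority, so the stored entry is replaced
assert cve_data["CVE-2024-99999"]["status"] == "Ignored"
```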
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py index d6de4dc84d..bb288e9099 100755 --- a/scripts/contrib/list-packageconfig-flags.py +++ b/scripts/contrib/list-packageconfig-flags.py | |||
| @@ -33,7 +33,7 @@ import bb.tinfoil | |||
| 33 | def get_fnlist(bbhandler, pkg_pn, preferred): | 33 | def get_fnlist(bbhandler, pkg_pn, preferred): |
| 34 | ''' Get all recipe file names ''' | 34 | ''' Get all recipe file names ''' |
| 35 | if preferred: | 35 | if preferred: |
| 36 | (latest_versions, preferred_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn) | 36 | (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn) |
| 37 | 37 | ||
| 38 | fn_list = [] | 38 | fn_list = [] |
| 39 | for pn in sorted(pkg_pn): | 39 | for pn in sorted(pkg_pn): |
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh new file mode 100755 index 0000000000..31caaf339d --- /dev/null +++ b/scripts/contrib/make-spdx-bindings.sh | |||
| @@ -0,0 +1,12 @@ | |||
| 1 | #! /bin/sh | ||
| 2 | # | ||
| 3 | # SPDX-License-Identifier: MIT | ||
| 4 | |||
| 5 | THIS_DIR="$(dirname "$0")" | ||
| 6 | |||
| 7 | VERSION="3.0.1" | ||
| 8 | |||
| 9 | shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \ | ||
| 10 | --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \ | ||
| 11 | --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \ | ||
| 12 | python -o $THIS_DIR/../../meta/lib/oe/spdx30.py | ||
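Assuming shacl2code is installed (e.g. via pip), running the helper regenerates the in-tree bindings; a hypothetical invocation:

```
$ ./scripts/contrib/make-spdx-bindings.sh   # rewrites meta/lib/oe/spdx30.py
```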
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py index de3862c897..7192113c28 100755 --- a/scripts/contrib/oe-build-perf-report-email.py +++ b/scripts/contrib/oe-build-perf-report-email.py | |||
| @@ -19,8 +19,6 @@ import socket | |||
| 19 | import subprocess | 19 | import subprocess |
| 20 | import sys | 20 | import sys |
| 21 | import tempfile | 21 | import tempfile |
| 22 | from email.mime.image import MIMEImage | ||
| 23 | from email.mime.multipart import MIMEMultipart | ||
| 24 | from email.mime.text import MIMEText | 22 | from email.mime.text import MIMEText |
| 25 | 23 | ||
| 26 | 24 | ||
| @@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") | |||
| 29 | log = logging.getLogger('oe-build-perf-report') | 27 | log = logging.getLogger('oe-build-perf-report') |
| 30 | 28 | ||
| 31 | 29 | ||
| 32 | # Find js scraper script | ||
| 33 | SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf', | ||
| 34 | 'scrape-html-report.js') | ||
| 35 | if not os.path.isfile(SCRAPE_JS): | ||
| 36 | log.error("Unableto find oe-build-perf-report-scrape.js") | ||
| 37 | sys.exit(1) | ||
| 38 | |||
| 39 | |||
| 40 | class ReportError(Exception): | ||
| 41 | """Local errors""" | ||
| 42 | pass | ||
| 43 | |||
| 44 | |||
| 45 | def check_utils(): | ||
| 46 | """Check that all needed utils are installed in the system""" | ||
| 47 | missing = [] | ||
| 48 | for cmd in ('phantomjs', 'optipng'): | ||
| 49 | if not shutil.which(cmd): | ||
| 50 | missing.append(cmd) | ||
| 51 | if missing: | ||
| 52 | log.error("The following tools are missing: %s", ' '.join(missing)) | ||
| 53 | sys.exit(1) | ||
| 54 | |||
| 55 | |||
| 56 | def parse_args(argv): | 30 | def parse_args(argv): |
| 57 | """Parse command line arguments""" | 31 | """Parse command line arguments""" |
| 58 | description = """Email build perf test report""" | 32 | description = """Email build perf test report""" |
| @@ -77,137 +51,19 @@ def parse_args(argv): | |||
| 77 | "the email parts") | 51 | "the email parts") |
| 78 | parser.add_argument('--text', | 52 | parser.add_argument('--text', |
| 79 | help="Plain text message") | 53 | help="Plain text message") |
| 80 | parser.add_argument('--html', | ||
| 81 | help="HTML peport generated by oe-build-perf-report") | ||
| 82 | parser.add_argument('--phantomjs-args', action='append', | ||
| 83 | help="Extra command line arguments passed to PhantomJS") | ||
| 84 | 54 | ||
| 85 | args = parser.parse_args(argv) | 55 | args = parser.parse_args(argv) |
| 86 | 56 | ||
| 87 | if not args.html and not args.text: | 57 | if not args.text: |
| 88 | parser.error("Please specify --html and/or --text") | 58 | parser.error("Please specify --text") |
| 89 | 59 | ||
| 90 | return args | 60 | return args |
| 91 | 61 | ||
| 92 | 62 | ||
| 93 | def decode_png(infile, outfile): | 63 | def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]): |
| 94 | """Parse/decode/optimize png data from a html element""" | ||
| 95 | with open(infile) as f: | ||
| 96 | raw_data = f.read() | ||
| 97 | |||
| 98 | # Grab raw base64 data | ||
| 99 | b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1) | ||
| 100 | b64_data = re.sub('">.+$', '', b64_data, 1) | ||
| 101 | |||
| 102 | # Replace file with proper decoded png | ||
| 103 | with open(outfile, 'wb') as f: | ||
| 104 | f.write(base64.b64decode(b64_data)) | ||
| 105 | |||
| 106 | subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT) | ||
| 107 | |||
| 108 | |||
| 109 | def mangle_html_report(infile, outfile, pngs): | ||
| 110 | """Mangle html file into a email compatible format""" | ||
| 111 | paste = True | ||
| 112 | png_dir = os.path.dirname(outfile) | ||
| 113 | with open(infile) as f_in: | ||
| 114 | with open(outfile, 'w') as f_out: | ||
| 115 | for line in f_in.readlines(): | ||
| 116 | stripped = line.strip() | ||
| 117 | # Strip out scripts | ||
| 118 | if stripped == '<!--START-OF-SCRIPTS-->': | ||
| 119 | paste = False | ||
| 120 | elif stripped == '<!--END-OF-SCRIPTS-->': | ||
| 121 | paste = True | ||
| 122 | elif paste: | ||
| 123 | if re.match('^.+href="data:image/png;base64', stripped): | ||
| 124 | # Strip out encoded pngs (as they're huge in size) | ||
| 125 | continue | ||
| 126 | elif 'www.gstatic.com' in stripped: | ||
| 127 | # HACK: drop references to external static pages | ||
| 128 | continue | ||
| 129 | |||
| 130 | # Replace charts with <img> elements | ||
| 131 | match = re.match('<div id="(?P<id>\w+)"', stripped) | ||
| 132 | if match and match.group('id') in pngs: | ||
| 133 | f_out.write('<img src="cid:{}"\n'.format(match.group('id'))) | ||
| 134 | else: | ||
| 135 | f_out.write(line) | ||
| 136 | |||
| 137 | |||
| 138 | def scrape_html_report(report, outdir, phantomjs_extra_args=None): | ||
| 139 | """Scrape html report into a format sendable by email""" | ||
| 140 | tmpdir = tempfile.mkdtemp(dir='.') | ||
| 141 | log.debug("Using tmpdir %s for phantomjs output", tmpdir) | ||
| 142 | |||
| 143 | if not os.path.isdir(outdir): | ||
| 144 | os.mkdir(outdir) | ||
| 145 | if os.path.splitext(report)[1] not in ('.html', '.htm'): | ||
| 146 | raise ReportError("Invalid file extension for report, needs to be " | ||
| 147 | "'.html' or '.htm'") | ||
| 148 | |||
| 149 | try: | ||
| 150 | log.info("Scraping HTML report with PhangomJS") | ||
| 151 | extra_args = phantomjs_extra_args if phantomjs_extra_args else [] | ||
| 152 | subprocess.check_output(['phantomjs', '--debug=true'] + extra_args + | ||
| 153 | [SCRAPE_JS, report, tmpdir], | ||
| 154 | stderr=subprocess.STDOUT) | ||
| 155 | |||
| 156 | pngs = [] | ||
| 157 | images = [] | ||
| 158 | for fname in os.listdir(tmpdir): | ||
| 159 | base, ext = os.path.splitext(fname) | ||
| 160 | if ext == '.png': | ||
| 161 | log.debug("Decoding %s", fname) | ||
| 162 | decode_png(os.path.join(tmpdir, fname), | ||
| 163 | os.path.join(outdir, fname)) | ||
| 164 | pngs.append(base) | ||
| 165 | images.append(fname) | ||
| 166 | elif ext in ('.html', '.htm'): | ||
| 167 | report_file = fname | ||
| 168 | else: | ||
| 169 | log.warning("Unknown file extension: '%s'", ext) | ||
| 170 | #shutil.move(os.path.join(tmpdir, fname), outdir) | ||
| 171 | |||
| 172 | log.debug("Mangling html report file %s", report_file) | ||
| 173 | mangle_html_report(os.path.join(tmpdir, report_file), | ||
| 174 | os.path.join(outdir, report_file), pngs) | ||
| 175 | return (os.path.join(outdir, report_file), | ||
| 176 | [os.path.join(outdir, i) for i in images]) | ||
| 177 | finally: | ||
| 178 | shutil.rmtree(tmpdir) | ||
| 179 | |||
| 180 | def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[], | ||
| 181 | blind_copy=[]): | ||
| 182 | """Send email""" | ||
| 183 | # Generate email message | 64 | # Generate email message |
| 184 | text_msg = html_msg = None | 65 | with open(text_fn) as f: |
| 185 | if text_fn: | 66 | msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain') |
| 186 | with open(text_fn) as f: | ||
| 187 | text_msg = MIMEText("Yocto build performance test report.\n" + | ||
| 188 | f.read(), 'plain') | ||
| 189 | if html_fn: | ||
| 190 | html_msg = msg = MIMEMultipart('related') | ||
| 191 | with open(html_fn) as f: | ||
| 192 | html_msg.attach(MIMEText(f.read(), 'html')) | ||
| 193 | for img_fn in image_fns: | ||
| 194 | # Expect that content id is same as the filename | ||
| 195 | cid = os.path.splitext(os.path.basename(img_fn))[0] | ||
| 196 | with open(img_fn, 'rb') as f: | ||
| 197 | image_msg = MIMEImage(f.read()) | ||
| 198 | image_msg['Content-ID'] = '<{}>'.format(cid) | ||
| 199 | html_msg.attach(image_msg) | ||
| 200 | |||
| 201 | if text_msg and html_msg: | ||
| 202 | msg = MIMEMultipart('alternative') | ||
| 203 | msg.attach(text_msg) | ||
| 204 | msg.attach(html_msg) | ||
| 205 | elif text_msg: | ||
| 206 | msg = text_msg | ||
| 207 | elif html_msg: | ||
| 208 | msg = html_msg | ||
| 209 | else: | ||
| 210 | raise ReportError("Neither plain text nor html body specified") | ||
| 211 | 67 | ||
| 212 | pw_data = pwd.getpwuid(os.getuid()) | 68 | pw_data = pwd.getpwuid(os.getuid()) |
| 213 | full_name = pw_data.pw_gecos.split(',')[0] | 69 | full_name = pw_data.pw_gecos.split(',')[0] |
| @@ -234,8 +90,6 @@ def main(argv=None): | |||
| 234 | if args.debug: | 90 | if args.debug: |
| 235 | log.setLevel(logging.DEBUG) | 91 | log.setLevel(logging.DEBUG) |
| 236 | 92 | ||
| 237 | check_utils() | ||
| 238 | |||
| 239 | if args.outdir: | 93 | if args.outdir: |
| 240 | outdir = args.outdir | 94 | outdir = args.outdir |
| 241 | if not os.path.exists(outdir): | 95 | if not os.path.exists(outdir): |
| @@ -245,25 +99,16 @@ def main(argv=None): | |||
| 245 | 99 | ||
| 246 | try: | 100 | try: |
| 247 | log.debug("Storing email parts in %s", outdir) | 101 | log.debug("Storing email parts in %s", outdir) |
| 248 | html_report = images = None | ||
| 249 | if args.html: | ||
| 250 | html_report, images = scrape_html_report(args.html, outdir, | ||
| 251 | args.phantomjs_args) | ||
| 252 | |||
| 253 | if args.to: | 102 | if args.to: |
| 254 | log.info("Sending email to %s", ', '.join(args.to)) | 103 | log.info("Sending email to %s", ', '.join(args.to)) |
| 255 | if args.cc: | 104 | if args.cc: |
| 256 | log.info("Copying to %s", ', '.join(args.cc)) | 105 | log.info("Copying to %s", ', '.join(args.cc)) |
| 257 | if args.bcc: | 106 | if args.bcc: |
| 258 | log.info("Blind copying to %s", ', '.join(args.bcc)) | 107 | log.info("Blind copying to %s", ', '.join(args.bcc)) |
| 259 | send_email(args.text, html_report, images, args.subject, | 108 | send_email(args.text, args.subject, args.to, args.cc, args.bcc) |
| 260 | args.to, args.cc, args.bcc) | ||
| 261 | except subprocess.CalledProcessError as err: | 109 | except subprocess.CalledProcessError as err: |
| 262 | log.error("%s, with output:\n%s", str(err), err.output.decode()) | 110 | log.error("%s, with output:\n%s", str(err), err.output.decode()) |
| 263 | return 1 | 111 | return 1 |
| 264 | except ReportError as err: | ||
| 265 | log.error(err) | ||
| 266 | return 1 | ||
| 267 | finally: | 112 | finally: |
| 268 | if not args.outdir: | 113 | if not args.outdir: |
| 269 | log.debug("Wiping %s", outdir) | 114 | log.debug("Wiping %s", outdir) |
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore new file mode 100644 index 0000000000..285851c984 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/.gitignore | |||
| @@ -0,0 +1,8 @@ | |||
| 1 | *.spdx.json | ||
| 2 | *.pyc | ||
| 3 | *.bak | ||
| 4 | *.swp | ||
| 5 | *.swo | ||
| 6 | *.swn | ||
| 7 | venv/* | ||
| 8 | .venv/* | ||
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md new file mode 100644 index 0000000000..44f76eacd8 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/README.md | |||
| @@ -0,0 +1,24 @@ | |||
| 1 | # OE Image Files from SBoM | ||
| 2 | |||
| 3 | This is an example Python script that lists the packaged files with their | ||
| 4 | checksums based on the SPDX 3.0.1 SBoM. | ||
| 5 | |||
| 6 | It can be used as a template for other programs to investigate output based on | ||
| 7 | OE SPDX SBoMs. | ||
| 8 | |||
| 9 | ## Installation | ||
| 10 | |||
| 11 | This project can be installed using a virtual environment: | ||
| 12 | ``` | ||
| 13 | python3 -m venv .venv | ||
| 14 | . .venv/bin/activate | ||
| 15 | python3 -m pip install -e '.[dev]' | ||
| 16 | ``` | ||
| 17 | |||
| 18 | ## Usage | ||
| 19 | |||
| 20 | After installing, the `oe-image-files` program can be used to show the files, e.g.: | ||
| 21 | |||
| 22 | ``` | ||
| 23 | oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json | ||
| 24 | ``` | ||
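The output is one `<name> - <sha256>` line per packaged file, with an empty hash column when the SBoM carries no SHA-256 for a file (this follows from the print() call in main.py below); a hypothetical, truncated example:

```
./bin/busybox - 3b0c4c...
./etc/hostname - 9f86d0...
```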
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml new file mode 100644 index 0000000000..3fab5dd605 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml | |||
| @@ -0,0 +1,23 @@ | |||
| 1 | [project] | ||
| 2 | name = "oe-image-files" | ||
| 3 | description = "Displays all packaged files on the root file system" | ||
| 4 | dynamic = ["version"] | ||
| 5 | requires-python = ">= 3.8" | ||
| 6 | readme = "README.md" | ||
| 7 | |||
| 8 | dependencies = [ | ||
| 9 | "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179", | ||
| 10 | ] | ||
| 11 | |||
| 12 | [project.scripts] | ||
| 13 | oe-image-files = "oe_image_files:main" | ||
| 14 | |||
| 15 | [build-system] | ||
| 16 | requires = ["hatchling"] | ||
| 17 | build-backend = "hatchling.build" | ||
| 18 | |||
| 19 | [tool.hatch.version] | ||
| 20 | path = "src/oe_image_files/version.py" | ||
| 21 | |||
| 22 | [tool.hatch.metadata] | ||
| 23 | allow-direct-references = true | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py new file mode 100644 index 0000000000..c28a133f2d --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py | |||
| @@ -0,0 +1 @@ | |||
| from .main import main | |||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py new file mode 100644 index 0000000000..8476bf6369 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py | |||
| @@ -0,0 +1,86 @@ | |||
| 1 | # SPDX-License-Identifier: MIT | ||
| 2 | |||
| 3 | import argparse | ||
| 4 | from pathlib import Path | ||
| 5 | |||
| 6 | |||
| 7 | from spdx_python_model import v3_0_1 as spdx_3_0_1 | ||
| 8 | from .version import VERSION | ||
| 9 | |||
| 10 | |||
| 11 | def main(): | ||
| 12 | parser = argparse.ArgumentParser( | ||
| 13 | description="Show the packaged files and checksums in an OE image from the SPDX SBoM" | ||
| 14 | ) | ||
| 15 | parser.add_argument("file", help="SPDX 3 input file", type=Path) | ||
| 16 | parser.add_argument("--version", "-V", action="version", version=VERSION) | ||
| 17 | |||
| 18 | args = parser.parse_args() | ||
| 19 | |||
| 20 | # Load SPDX data from file into a new object set | ||
| 21 | objset = spdx_3_0_1.SHACLObjectSet() | ||
| 22 | with args.file.open("r") as f: | ||
| 23 | d = spdx_3_0_1.JSONLDDeserializer() | ||
| 24 | d.read(f, objset) | ||
| 25 | |||
| 26 | # Find the top level SPDX Document object | ||
| 27 | for o in objset.foreach_type(spdx_3_0_1.SpdxDocument): | ||
| 28 | doc = o | ||
| 29 | break | ||
| 30 | else: | ||
| 31 | print("ERROR: No SPDX Document found!") | ||
| 32 | return 1 | ||
| 33 | |||
| 34 | # Find the root SBoM in the document | ||
| 35 | for o in doc.rootElement: | ||
| 36 | if isinstance(o, spdx_3_0_1.software_Sbom): | ||
| 37 | sbom = o | ||
| 38 | break | ||
| 39 | else: | ||
| 40 | print("ERROR: SBoM not found in document") | ||
| 41 | return 1 | ||
| 42 | |||
| 43 | # Find the root file system package in the SBoM | ||
| 44 | for o in sbom.rootElement: | ||
| 45 | if ( | ||
| 46 | isinstance(o, spdx_3_0_1.software_Package) | ||
| 47 | and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive | ||
| 48 | ): | ||
| 49 | root_package = o | ||
| 50 | break | ||
| 51 | else: | ||
| 52 | print("ERROR: Package not found in document") | ||
| 53 | return 1 | ||
| 54 | |||
| 55 | # Find all relationships of type "contains" that go FROM the root file | ||
| 56 | # system | ||
| 57 | files = [] | ||
| 58 | for rel in objset.foreach_type(spdx_3_0_1.Relationship): | ||
| 59 | if rel.relationshipType != spdx_3_0_1.RelationshipType.contains: | ||
| 60 | continue | ||
| 61 | |||
| 62 | if rel.from_ is not root_package: | ||
| 63 | continue | ||
| 64 | |||
| 65 | # Iterate over all files in the TO of the relationship | ||
| 66 | for o in rel.to: | ||
| 67 | if not isinstance(o, spdx_3_0_1.software_File): | ||
| 68 | continue | ||
| 69 | |||
| 70 | # Find the SHA 256 hash of the file (if any) | ||
| 71 | for h in o.verifiedUsing: | ||
| 72 | if ( | ||
| 73 | isinstance(h, spdx_3_0_1.Hash) | ||
| 74 | and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256 | ||
| 75 | ): | ||
| 76 | files.append((o.name, h.hashValue)) | ||
| 77 | break | ||
| 78 | else: | ||
| 79 | files.append((o.name, "")) | ||
| 80 | |||
| 81 | # Print files | ||
| 82 | files.sort(key=lambda x: x[0]) | ||
| 83 | for name, hash_val in files: | ||
| 84 | print(f"{name} - {hash_val}") | ||
| 85 | |||
| 86 | return 0 | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py new file mode 100644 index 0000000000..901e5110b2 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py | |||
| @@ -0,0 +1 @@ | |||
| VERSION = "0.0.1" | |||
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py index 62c509f51c..d8d7b214e5 100755 --- a/scripts/contrib/patchreview.py +++ b/scripts/contrib/patchreview.py | |||
| @@ -1,14 +1,29 @@ | |||
| 1 | #! /usr/bin/env python3 | 1 | #! /usr/bin/env python3 |
| 2 | # | 2 | # |
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 4 | # | 6 | # |
| 5 | 7 | ||
| 8 | import argparse | ||
| 9 | import collections | ||
| 10 | import json | ||
| 11 | import os | ||
| 12 | import os.path | ||
| 13 | import pathlib | ||
| 14 | import re | ||
| 15 | import subprocess | ||
| 16 | |||
| 17 | import sys | ||
| 18 | sys.path.append(os.path.join(sys.path[0], '../../meta/lib')) | ||
| 19 | import oe.qa | ||
| 20 | |||
| 6 | # TODO | 21 | # TODO |
| 7 | # - option to just list all broken files | 22 | # - option to just list all broken files |
| 8 | # - test suite | 23 | # - test suite |
| 9 | # - validate signed-off-by | 24 | # - validate signed-off-by |
| 10 | 25 | ||
| 11 | status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied") | 26 | status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream") |
| 12 | 27 | ||
| 13 | class Result: | 28 | class Result: |
| 14 | # Whether the patch has an Upstream-Status or not | 29 | # Whether the patch has an Upstream-Status or not |
| @@ -33,20 +48,18 @@ def blame_patch(patch): | |||
| 33 | From a patch filename, return a list of "commit summary (author name <author | 48 | From a patch filename, return a list of "commit summary (author name <author |
| 34 | email>)" strings representing the history. | 49 | email>)" strings representing the history. |
| 35 | """ | 50 | """ |
| 36 | import subprocess | ||
| 37 | return subprocess.check_output(("git", "log", | 51 | return subprocess.check_output(("git", "log", |
| 38 | "--follow", "--find-renames", "--diff-filter=A", | 52 | "--follow", "--find-renames", "--diff-filter=A", |
| 39 | "--format=%s (%aN <%aE>)", | 53 | "--format=%s (%aN <%aE>)", |
| 40 | "--", patch)).decode("utf-8").splitlines() | 54 | "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines() |
| 41 | 55 | ||
| 42 | def patchreview(path, patches): | 56 | def patchreview(patches): |
| 43 | import re, os.path | ||
| 44 | 57 | ||
| 45 | # General pattern: start of line, optional whitespace, tag with optional | 58 | # General pattern: start of line, optional whitespace, tag with optional |
| 46 | # hyphen or spaces, maybe a colon, some whitespace, then the value, all case | 59 | # hyphen or spaces, maybe a colon, some whitespace, then the value, all case |
| 47 | # insensitive. | 60 | # insensitive. |
| 48 | sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) | 61 | sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) |
| 49 | status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE) | 62 | status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE) |
| 50 | cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) | 63 | cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) |
| 51 | cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) | 64 | cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) |
| 52 | 65 | ||
| @@ -54,11 +67,10 @@ def patchreview(path, patches): | |||
| 54 | 67 | ||
| 55 | for patch in patches: | 68 | for patch in patches: |
| 56 | 69 | ||
| 57 | fullpath = os.path.join(path, patch) | ||
| 58 | result = Result() | 70 | result = Result() |
| 59 | results[fullpath] = result | 71 | results[patch] = result |
| 60 | 72 | ||
| 61 | content = open(fullpath, encoding='ascii', errors='ignore').read() | 73 | content = open(patch, encoding='ascii', errors='ignore').read() |
| 62 | 74 | ||
| 63 | # Find the Signed-off-by tag | 75 | # Find the Signed-off-by tag |
| 64 | match = sob_re.search(content) | 76 | match = sob_re.search(content) |
| @@ -70,12 +82,11 @@ def patchreview(path, patches): | |||
| 70 | else: | 82 | else: |
| 71 | result.missing_sob = True | 83 | result.missing_sob = True |
| 72 | 84 | ||
| 73 | |||
| 74 | # Find the Upstream-Status tag | 85 | # Find the Upstream-Status tag |
| 75 | match = status_re.search(content) | 86 | match = status_re.search(content) |
| 76 | if match: | 87 | if match: |
| 77 | value = match.group(1) | 88 | value = oe.qa.check_upstream_status(patch) |
| 78 | if value != "Upstream-Status:": | 89 | if value: |
| 79 | result.malformed_upstream_status = value | 90 | result.malformed_upstream_status = value |
| 80 | 91 | ||
| 81 | value = match.group(2).lower() | 92 | value = match.group(2).lower() |
| @@ -191,29 +202,56 @@ Patches in Pending state: %s""" % (total_patches, | |||
| 191 | def histogram(results): | 202 | def histogram(results): |
| 192 | from toolz import recipes, dicttoolz | 203 | from toolz import recipes, dicttoolz |
| 193 | import math | 204 | import math |
| 205 | |||
| 194 | counts = recipes.countby(lambda r: r.upstream_status, results.values()) | 206 | counts = recipes.countby(lambda r: r.upstream_status, results.values()) |
| 195 | bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) | 207 | bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) |
| 196 | for k in bars: | 208 | for k in bars: |
| 197 | print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) | 209 | print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) |
| 198 | 210 | ||
| 211 | def find_layers(candidate): | ||
| 212 | # candidate can either be the path to a layer directly (eg meta-intel), or a | ||
| 213 | # repository that contains other layers (meta-arm). We can determine what by | ||
| 214 | # looking for a conf/layer.conf file. If that file exists then it's a layer, | ||
| 215 | # otherwise its a repository of layers and we can assume they're called | ||
| 216 | # meta-*. | ||
| 217 | |||
| 218 | if (candidate / "conf" / "layer.conf").exists(): | ||
| 219 | return [candidate.absolute()] | ||
| 220 | else: | ||
| 221 | return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))] | ||
| 222 | |||
| 223 | # TODO these don't actually handle dynamic-layers/ | ||
| 224 | |||
| 225 | def gather_patches(layers): | ||
| 226 | patches = [] | ||
| 227 | for directory in layers: | ||
| 228 | filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split() | ||
| 229 | patches += [os.path.join(directory, f) for f in filenames] | ||
| 230 | return patches | ||
| 231 | |||
| 232 | def count_recipes(layers): | ||
| 233 | count = 0 | ||
| 234 | for directory in layers: | ||
| 235 | output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True) | ||
| 236 | count += len(output.splitlines()) | ||
| 237 | return count | ||
| 199 | 238 | ||
| 200 | if __name__ == "__main__": | 239 | if __name__ == "__main__": |
| 201 | import argparse, subprocess, os | ||
| 202 | |||
| 203 | args = argparse.ArgumentParser(description="Patch Review Tool") | 240 | args = argparse.ArgumentParser(description="Patch Review Tool") |
| 204 | args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") | 241 | args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") |
| 205 | args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") | 242 | args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") |
| 206 | args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") | 243 | args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") |
| 207 | args.add_argument("-j", "--json", help="update JSON") | 244 | args.add_argument("-j", "--json", help="update JSON") |
| 208 | args.add_argument("directory", help="directory to scan") | 245 | args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)") |
| 209 | args = args.parse_args() | 246 | args = args.parse_args() |
| 210 | 247 | ||
| 211 | patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split() | 248 | layers = find_layers(args.directory) |
| 212 | results = patchreview(args.directory, patches) | 249 | print(f"Found layers {' '.join((d.name for d in layers))}") |
| 250 | patches = gather_patches(layers) | ||
| 251 | results = patchreview(patches) | ||
| 213 | analyse(results, want_blame=args.blame, verbose=args.verbose) | 252 | analyse(results, want_blame=args.blame, verbose=args.verbose) |
| 214 | 253 | ||
| 215 | if args.json: | 254 | if args.json: |
| 216 | import json, os.path, collections | ||
| 217 | if os.path.isfile(args.json): | 255 | if os.path.isfile(args.json): |
| 218 | data = json.load(open(args.json)) | 256 | data = json.load(open(args.json)) |
| 219 | else: | 257 | else: |
| @@ -221,7 +259,11 @@ if __name__ == "__main__": | |||
| 221 | 259 | ||
| 222 | row = collections.Counter() | 260 | row = collections.Counter() |
| 223 | row["total"] = len(results) | 261 | row["total"] = len(results) |
| 224 | row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip() | 262 | row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip() |
| 263 | row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip() | ||
| 264 | row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip() | ||
| 265 | row['recipe_count'] = count_recipes(layers) | ||
| 266 | |||
| 225 | for r in results.values(): | 267 | for r in results.values(): |
| 226 | if r.upstream_status in status_values: | 268 | if r.upstream_status in status_values: |
| 227 | row[r.upstream_status] += 1 | 269 | row[r.upstream_status] += 1 |
| @@ -231,7 +273,7 @@ if __name__ == "__main__": | |||
| 231 | row['malformed-sob'] += 1 | 273 | row['malformed-sob'] += 1 |
| 232 | 274 | ||
| 233 | data.append(row) | 275 | data.append(row) |
| 234 | json.dump(data, open(args.json, "w")) | 276 | json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t") |
| 235 | 277 | ||
| 236 | if args.histogram: | 278 | if args.histogram: |
| 237 | print() | 279 | print() |
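With the new layer discovery, patchreview can be pointed either at a single layer or at a repository of layers; a hypothetical session (layer names are illustrative, and the "Found layers" line matches the print() added above):

```
$ ./scripts/contrib/patchreview.py meta-arm
Found layers meta-arm meta-arm-bsp meta-arm-toolchain
...
$ ./scripts/contrib/patchreview.py meta-intel
Found layers meta-intel
...
```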
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh index 23f238adf6..4012ac7ba7 100755 --- a/scripts/contrib/test_build_time.sh +++ b/scripts/contrib/test_build_time.sh | |||
| @@ -97,7 +97,7 @@ if [ $? != 0 ] ; then | |||
| 97 | exit 251 | 97 | exit 251 |
| 98 | fi | 98 | fi |
| 99 | 99 | ||
| 100 | if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then | 100 | if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then |
| 101 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" | 101 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" |
| 102 | fi | 102 | fi |
| 103 | 103 | ||
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh index 478e8b0d03..a2879d2336 100755 --- a/scripts/contrib/test_build_time_worker.sh +++ b/scripts/contrib/test_build_time_worker.sh | |||
| @@ -1,5 +1,7 @@ | |||
| 1 | #!/bin/bash | 1 | #!/bin/bash |
| 2 | # | 2 | # |
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 4 | # | 6 | # |
| 5 | # This is an example script to be used in conjunction with test_build_time.sh | 7 | # This is an example script to be used in conjunction with test_build_time.sh |
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py index 7bffa78e23..a90b5010bc 100755 --- a/scripts/contrib/verify-homepage.py +++ b/scripts/contrib/verify-homepage.py | |||
| @@ -1,5 +1,7 @@ | |||
| 1 | #!/usr/bin/env python3 | 1 | #!/usr/bin/env python3 |
| 2 | # | 2 | # |
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 4 | # | 6 | # |
| 5 | # This script can be used to verify HOMEPAGE values for all recipes in | 7 | # This script can be used to verify HOMEPAGE values for all recipes in |
