Diffstat (limited to 'scripts')
87 files changed, 3492 insertions, 2227 deletions
diff --git a/scripts/b4-wrapper-poky.py b/scripts/b4-wrapper-poky.py new file mode 100755 index 0000000000..f1170db06b --- /dev/null +++ b/scripts/b4-wrapper-poky.py | |||
| @@ -0,0 +1,185 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: MIT | ||
| 6 | # | ||
| 7 | # This script is to be called by b4: | ||
| 8 | # - through the b4.prep-perpatch-check-cmd with "prep-perpatch-check-cmd" as | ||
| 9 | # first argument, | ||
| 10 | # - through b4.send-auto-cc-cmd with "send-auto-cc-cmd" as first argument, | ||
| 11 | # - through b4.send-auto-to-cmd with "send-auto-to-cmd" as first argument, | ||
| 12 | # | ||
| 13 | # When prep-perpatch-check-cmd is passed: | ||
| 14 | # | ||
| 15 | # This checks that a patch makes changes to at most one project in the poky | ||
| 16 | # combo repo (that is, out of yocto-docs, bitbake, openembedded-core combined | ||
| 17 | # into poky and the poky-specific files). | ||
| 18 | # | ||
| 19 | # Printing anything to stdout from this script will cause b4 prep --check to | ||
| 20 | # fail for the currently parsed patch. | ||
| 21 | # | ||
| 22 | # It also checks that, combined, the patches in a series change at most one project. | ||
| 23 | # | ||
| 24 | # When send-auto-cc-cmd is passed: | ||
| 25 | # | ||
| 26 | # This returns the list of Cc recipients for a patch. | ||
| 27 | # | ||
| 28 | # When send-auto-to-cmd is passed: | ||
| 29 | # | ||
| 30 | # This returns the list of To recipients for a patch. | ||
| 31 | # | ||
| 32 | # This script takes as stdin a patch. | ||
| 33 | |||
| 34 | import pathlib | ||
| 35 | import re | ||
| 36 | import shutil | ||
| 37 | import subprocess | ||
| 38 | import sys | ||
| 39 | |||
| 40 | cmd = sys.argv[1] | ||
| 41 | |||
| 42 | patch = sys.stdin.readlines() | ||
| 43 | |||
| 44 | # The Subject field is used to identify the last patch, since this script is | ||
| 45 | # called once per patch. All patches in a series update the same shared file, | ||
| 46 | # named after the unique identifier in the References field, to record which | ||
| 47 | # projects are modified by earlier patches in the series. To avoid cluttering | ||
| 48 | # the disk, the last patch in the series removes that shared file. | ||
| 49 | re_subject = re.compile(r'^Subject:.*\[.*PATCH.*\s(\d+)/\1') | ||
| 50 | re_ref = re.compile(r'^References: <(.*)>$') | ||
| 51 | |||
| 52 | subject = None | ||
| 53 | ref = None | ||
| 54 | |||
| 55 | if not shutil.which("lsdiff"): | ||
| 56 | print("lsdiff missing from host, please install patchutils", file=sys.stderr) | ||
| 57 | sys.exit(-1) | ||
| 58 | |||
| 59 | try: | ||
| 60 | one_patch_series = False | ||
| 61 | for line in patch: | ||
| 62 | subject = re_subject.match(line) | ||
| 63 | if subject: | ||
| 64 | # Handle [PATCH 1/1] | ||
| 65 | if subject.group(1) == "1": | ||
| 66 | one_patch_series = True | ||
| 67 | break | ||
| 68 | if re.match(r'^Subject: .*\[.*PATCH[^/]*\]', line): | ||
| 69 | # A single patch is named [PATCH], but if there is a prefix it could be | ||
| 70 | # [PATCH prefix], so handle everything that doesn't have a / character, | ||
| 71 | # which is used as the separator between the current patch number and | ||
| 72 | # the total patch count | ||
| 73 | one_patch_series = True | ||
| 74 | break | ||
| 75 | |||
| 76 | if cmd == "prep-perpatch-check-cmd" and not one_patch_series: | ||
| 77 | for line in patch: | ||
| 78 | ref = re_ref.match(line) | ||
| 79 | if ref: | ||
| 80 | break | ||
| 81 | |||
| 82 | if not ref: | ||
| 83 | print("Failed to find ref to cover letter (References:)...", file=sys.stderr) | ||
| 84 | sys.exit(-2) | ||
| 85 | |||
| 86 | ref = ref.group(1) | ||
| 87 | series_check = pathlib.Path(f".tmp-{ref}") | ||
| 88 | |||
| 89 | patch = "".join(patch) | ||
| 90 | |||
| 91 | if cmd == "send-auto-cc-cmd": | ||
| 92 | # Patches to BitBake documentation should also go to yocto-docs mailing list | ||
| 93 | project_paths = { | ||
| 94 | "yocto-docs": ["bitbake/doc/*"], | ||
| 95 | } | ||
| 96 | else: | ||
| 97 | project_paths = { | ||
| 98 | "bitbake": ["bitbake/*"], | ||
| 99 | "yocto-docs": ["documentation/*"], | ||
| 100 | "poky": [ | ||
| 101 | "meta-poky/*", | ||
| 102 | "meta-yocto-bsp/*", | ||
| 103 | "README.hardware.md", | ||
| 104 | "README.poky.md", | ||
| 105 | # scripts/b4-wrapper-poky.py is only run by b4 from within the poky | ||
| 106 | # git repo. Given that limitation, changes made to .b4-config can | ||
| 107 | # only be poky's and not OE-Core's, as only poky's copy is stored | ||
| 108 | # in the poky git repo. | ||
| 109 | ".b4-config", | ||
| 110 | ], | ||
| 111 | } | ||
| 112 | |||
| 113 | # List of projects touched by this patch | ||
| 114 | projs = [] | ||
| 115 | |||
| 116 | # Any file not matched by any path in project_paths means it is from | ||
| 117 | # OE-Core. | ||
| 118 | # When matching some path in project_paths, remove the matched files from | ||
| 119 | # that list. | ||
| 120 | files_left = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"], | ||
| 121 | input=patch, text=True) | ||
| 122 | files_left = set(files_left.splitlines()) | ||
| 123 | |||
| 124 | for proj, proj_paths in project_paths.items(): | ||
| 125 | lsdiff_args = [f"--include={path}" for path in proj_paths] | ||
| 126 | files = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"] + lsdiff_args, | ||
| 127 | input=patch, text=True) | ||
| 128 | if len(files): | ||
| 129 | files_left = files_left - set(files.splitlines()) | ||
| 130 | projs.append(proj) | ||
| 131 | continue | ||
| 132 | |||
| 133 | # Handle patches made with --no-prefix | ||
| 134 | files = subprocess.check_output(["lsdiff"] + lsdiff_args, | ||
| 135 | input=patch, text=True) | ||
| 136 | if len(files): | ||
| 137 | files_left = files_left - set(files.splitlines()) | ||
| 138 | projs.append(proj) | ||
| 139 | |||
| 140 | # Catch-all for everything not poky-specific or in bitbake/yocto-docs | ||
| 141 | if len(files_left) and cmd != "send-auto-cc-cmd": | ||
| 142 | projs.append("openembedded-core") | ||
| 143 | |||
| 144 | if cmd == "prep-perpatch-check-cmd": | ||
| 145 | if len(projs) > 1: | ||
| 146 | print(f"Diff spans more than one project ({', '.join(sorted(projs))}), split into multiple commits...", | ||
| 147 | file=sys.stderr) | ||
| 148 | sys.exit(-3) | ||
| 149 | |||
| 150 | # No need to check other patches in the series as there aren't any | ||
| 151 | if one_patch_series: | ||
| 152 | sys.exit(0) | ||
| 153 | |||
| 154 | # This should be replaced once b4 supports prep-perseries-check-cmd (or something similar) | ||
| 155 | |||
| 156 | if series_check.exists(): | ||
| 157 | # NOT race-free if b4 decides to parallelize prep-perpatch-check-cmd | ||
| 158 | series_projs = series_check.read_text().split('\n') | ||
| 159 | else: | ||
| 160 | series_projs = [] | ||
| 161 | |||
| 162 | series_projs += projs | ||
| 163 | uniq_series_projs = set(series_projs) | ||
| 164 | # NOT race-free, if b4 decides to parallelize prep-perpatch-check-cmd | ||
| 165 | series_check.write_text('\n'.join(uniq_series_projs)) | ||
| 166 | |||
| 167 | if len(uniq_series_projs) > 1: | ||
| 168 | print(f"Series spans more than one project ({', '.join(sorted(uniq_series_projs))}), split into multiple series...", | ||
| 169 | file=sys.stderr) | ||
| 170 | sys.exit(-4) | ||
| 171 | else: # send-auto-cc-cmd / send-auto-to-cmd | ||
| 172 | ml_projs = { | ||
| 173 | "bitbake": "bitbake-devel@lists.openembedded.org", | ||
| 174 | "yocto-docs": "docs@lists.yoctoproject.org", | ||
| 175 | "poky": "poky@lists.yoctoproject.org", | ||
| 176 | "openembedded-core": "openembedded-core@lists.openembedded.org", | ||
| 177 | } | ||
| 178 | |||
| 179 | print("\n".join([ml_projs[ml] for ml in projs])) | ||
| 180 | |||
| 181 | sys.exit(0) | ||
| 182 | finally: | ||
| 183 | # Last patch in the series, cleanup tmp file | ||
| 184 | if subject and ref and series_check.exists(): | ||
| 185 | series_check.unlink() | ||
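For reference, a sketch of how these hooks might be wired up in the repository's .b4-config (the option names are taken from the header comment above; consult the b4 documentation for the authoritative syntax):

```
[b4]
  prep-perpatch-check-cmd = ./scripts/b4-wrapper-poky.py prep-perpatch-check-cmd
  send-auto-to-cmd = ./scripts/b4-wrapper-poky.py send-auto-to-cmd
  send-auto-cc-cmd = ./scripts/b4-wrapper-poky.py send-auto-cc-cmd
```

b4 feeds each patch to the command on stdin; any output on stdout makes b4 prep --check flag that patch, while the send-auto-* variants print the To/Cc recipient lists.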
diff --git a/scripts/bitbake-prserv-tool b/scripts/bitbake-prserv-tool index 80028342b1..0559c4c38a 100755 --- a/scripts/bitbake-prserv-tool +++ b/scripts/bitbake-prserv-tool | |||
| @@ -55,43 +55,6 @@ do_import () | |||
| 55 | return $ret | 55 | return $ret |
| 56 | } | 56 | } |
| 57 | 57 | ||
| 58 | do_migrate_localcount () | ||
| 59 | { | ||
| 60 | df=`bitbake -R conf/migrate_localcount.conf -e | \ | ||
| 61 | grep ^LOCALCOUNT_DUMPFILE= | cut -f2 -d\"` | ||
| 62 | if [ "x${df}" == "x" ]; | ||
| 63 | then | ||
| 64 | echo "LOCALCOUNT_DUMPFILE is not defined!" | ||
| 65 | return 1 | ||
| 66 | fi | ||
| 67 | |||
| 68 | rm -f $df | ||
| 69 | clean_cache | ||
| 70 | echo "Exporting LOCALCOUNT to AUTOINCs..." | ||
| 71 | bitbake -R conf/migrate_localcount.conf -p | ||
| 72 | [ ! $? -eq 0 ] && echo "Exporting to file $df failed!" && exit 1 | ||
| 73 | |||
| 74 | if [ -e $df ]; | ||
| 75 | then | ||
| 76 | echo "Exporting to file $df succeeded!" | ||
| 77 | else | ||
| 78 | echo "Exporting to file $df failed!" | ||
| 79 | exit 1 | ||
| 80 | fi | ||
| 81 | |||
| 82 | echo "Importing generated AUTOINC entries..." | ||
| 83 | [ -e $df ] && do_import $df | ||
| 84 | |||
| 85 | if [ ! $? -eq 0 ] | ||
| 86 | then | ||
| 87 | echo "Migration from LOCALCOUNT to AUTOINCs failed!" | ||
| 88 | return 1 | ||
| 89 | fi | ||
| 90 | |||
| 91 | echo "Migration from LOCALCOUNT to AUTOINCs succeeded!" | ||
| 92 | return 0 | ||
| 93 | } | ||
| 94 | |||
| 95 | [ $# -eq 0 ] && help && exit 1 | 58 | [ $# -eq 0 ] && help && exit 1 |
| 96 | 59 | ||
| 97 | case $2 in | 60 | case $2 in |
| @@ -110,9 +73,6 @@ export) | |||
| 110 | import) | 73 | import) |
| 111 | do_import $2 | 74 | do_import $2 |
| 112 | ;; | 75 | ;; |
| 113 | migrate_localcount) | ||
| 114 | do_migrate_localcount | ||
| 115 | ;; | ||
| 116 | *) | 76 | *) |
| 117 | help | 77 | help |
| 118 | exit 1 | 78 | exit 1 |
diff --git a/scripts/buildstats-diff b/scripts/buildstats-diff index c9aa76a8fa..df1df432f1 100755 --- a/scripts/buildstats-diff +++ b/scripts/buildstats-diff | |||
| @@ -12,6 +12,7 @@ import glob | |||
| 12 | import logging | 12 | import logging |
| 13 | import math | 13 | import math |
| 14 | import os | 14 | import os |
| 15 | import pathlib | ||
| 15 | import sys | 16 | import sys |
| 16 | from operator import attrgetter | 17 | from operator import attrgetter |
| 17 | 18 | ||
| @@ -251,11 +252,32 @@ Script for comparing buildstats of two separate builds.""" | |||
| 251 | "average over them") | 252 | "average over them") |
| 252 | parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[], | 253 | parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[], |
| 253 | help="Only include TASK in report. May be specified multiple times") | 254 | help="Only include TASK in report. May be specified multiple times") |
| 254 | parser.add_argument('buildstats1', metavar='BUILDSTATS1', help="'Left' buildstat") | 255 | parser.add_argument('buildstats1', metavar='BUILDSTATS1', nargs="?", help="'Left' buildstat") |
| 255 | parser.add_argument('buildstats2', metavar='BUILDSTATS2', help="'Right' buildstat") | 256 | parser.add_argument('buildstats2', metavar='BUILDSTATS2', nargs="?", help="'Right' buildstat") |
| 256 | 257 | ||
| 257 | args = parser.parse_args(argv) | 258 | args = parser.parse_args(argv) |
| 258 | 259 | ||
| 260 | if args.buildstats1 and args.buildstats2: | ||
| 261 | # Both paths specified | ||
| 262 | pass | ||
| 263 | elif args.buildstats1 or args.buildstats2: | ||
| 264 | # Just one path specified, this is an error | ||
| 265 | parser.print_usage(sys.stderr) | ||
| 266 | print("Either specify two buildstats paths, or none to use the last two paths.", file=sys.stderr) | ||
| 267 | sys.exit(1) | ||
| 268 | else: | ||
| 269 | # No paths specified, try to find the last two buildstats | ||
| 270 | try: | ||
| 271 | buildstats_dir = pathlib.Path(os.environ["BUILDDIR"]) / "tmp" / "buildstats" | ||
| 272 | paths = sorted(buildstats_dir.iterdir()) | ||
| 273 | args.buildstats2 = paths.pop() | ||
| 274 | args.buildstats1 = paths.pop() | ||
| 275 | print(f"Comparing {args.buildstats1} -> {args.buildstats2}\n") | ||
| 276 | except KeyError: | ||
| 277 | parser.print_usage(sys.stderr) | ||
| 278 | print("Build environment has not been configured, cannot find buildstats", file=sys.stderr) | ||
| 279 | sys.exit(1) | ||
| 280 | |||
| 259 | # We do not need/want to read all buildstats if we just want to look at the | 281 | # We do not need/want to read all buildstats if we just want to look at the
| 260 | # package versions | 282 | # package versions |
| 261 | if args.ver_diff: | 283 | if args.ver_diff: |
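The fallback added above relies on buildstats directory names being date-time stamps, so plain lexicographic sorting is chronological. A minimal sketch of the selection logic under that assumption (function name is illustrative):

```python
import pathlib

def last_two_buildstats(buildstats_dir: pathlib.Path):
    # Buildstats directories are assumed to carry DATETIME-style names
    # (e.g. "20250101120000"), so sorted() yields chronological order.
    paths = sorted(buildstats_dir.iterdir())
    if len(paths) < 2:
        raise ValueError("need at least two buildstats to compare")
    return paths[-2], paths[-1]
```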
diff --git a/scripts/buildstats-summary b/scripts/buildstats-summary index b10c671b29..cc2a27722a 100755 --- a/scripts/buildstats-summary +++ b/scripts/buildstats-summary | |||
| @@ -87,7 +87,11 @@ def main(argv=None) -> int: | |||
| 87 | ) | 87 | ) |
| 88 | 88 | ||
| 89 | parser.add_argument( | 89 | parser.add_argument( |
| 90 | "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path | 90 | "buildstats", |
| 91 | metavar="BUILDSTATS", | ||
| 92 | nargs="?", | ||
| 93 | type=pathlib.Path, | ||
| 94 | help="Buildstats file, or latest if not specified", | ||
| 91 | ) | 95 | ) |
| 92 | parser.add_argument( | 96 | parser.add_argument( |
| 93 | "--sort", | 97 | "--sort", |
| @@ -116,6 +120,16 @@ def main(argv=None) -> int: | |||
| 116 | 120 | ||
| 117 | args = parser.parse_args(argv) | 121 | args = parser.parse_args(argv) |
| 118 | 122 | ||
| 123 | # If a buildstats file wasn't specified, try to find the last one | ||
| 124 | if not args.buildstats: | ||
| 125 | try: | ||
| 126 | builddir = pathlib.Path(os.environ["BUILDDIR"]) | ||
| 127 | buildstats_dir = builddir / "tmp" / "buildstats" | ||
| 128 | args.buildstats = sorted(buildstats_dir.iterdir())[-1] | ||
| 129 | except KeyError: | ||
| 130 | print("Build environment has not been configured, cannot find buildstats") | ||
| 131 | return 1 | ||
| 132 | |||
| 119 | bs = read_buildstats(args.buildstats) | 133 | bs = read_buildstats(args.buildstats) |
| 120 | dump_buildstats(args, bs) | 134 | dump_buildstats(args, bs) |
| 121 | 135 | ||
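With both changes in place, and assuming an initialized build environment (oe-init-build-env sets BUILDDIR), the tools can be run without arguments:

```
$ buildstats-summary    # summarizes the most recent build
$ buildstats-diff       # compares the two most recent builds
```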
diff --git a/scripts/clean-hashserver-database b/scripts/clean-hashserver-database new file mode 100755 index 0000000000..9fa162c981 --- /dev/null +++ b/scripts/clean-hashserver-database | |||
| @@ -0,0 +1,77 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | set -euo pipefail | ||
| 3 | |||
| 4 | SSTATE_DIR="" | ||
| 5 | BB_HASHCLIENT="" | ||
| 6 | BB_HASHSERVER="" | ||
| 7 | |||
| 8 | ALIVE_DB_MARK="alive" | ||
| 9 | CLEAN_DB="false" | ||
| 10 | THRESHOLD_AGE="3600" | ||
| 11 | |||
| 12 | function help() { | ||
| 13 | cat <<HELP_TEXT | ||
| 14 | Usage: $0 --sstate-dir path --hashclient path --hashserver-address address \ | ||
| 15 | [--mark value] [--clean-db] [--threshold-age seconds] | ||
| 16 | |||
| 17 | Auxiliary script to remove unused or no-longer-relevant entries from the hash equivalence | ||
| 18 | database, based on the files available in the sstate directory. | ||
| 19 | |||
| 20 | -h | --help) Show this help message and exit | ||
| 21 | -a | --hashserver-address) bitbake-hashserver address | ||
| 22 | -c | --hashclient) Path to bitbake-hashclient | ||
| 23 | -m | --mark) Marker string to mark database entries | ||
| 24 | -s | --sstate-dir) Path to the sstate dir | ||
| 25 | -t | --threshold-age) Remove unused entries older than this many seconds (default: 3600) | ||
| 26 | --clean-db) Remove all unmarked and unused entries from the database | ||
| 27 | HELP_TEXT | ||
| 28 | } | ||
| 29 | |||
| 30 | function argument_parser() { | ||
| 31 | while [ $# -gt 0 ]; do | ||
| 32 | case "$1" in | ||
| 33 | -h | --help) help; exit 0 ;; | ||
| 34 | -a | --hashserver-address) BB_HASHSERVER="$2"; shift ;; | ||
| 35 | -c | --hashclient) BB_HASHCLIENT="$2"; shift ;; | ||
| 36 | -m | --mark) ALIVE_DB_MARK="$2"; shift ;; | ||
| 37 | -s | --sstate-dir) SSTATE_DIR="$2"; shift ;; | ||
| 38 | -t | --threshold-age) THRESHOLD_AGE="$2"; shift ;; | ||
| 39 | --clean-db) CLEAN_DB="true";; | ||
| 40 | *) | ||
| 41 | echo "Argument '$1' is not supported" >&2 | ||
| 42 | help >&2 | ||
| 43 | exit 1 | ||
| 44 | ;; | ||
| 45 | esac | ||
| 46 | shift | ||
| 47 | done | ||
| 48 | |||
| 49 | function validate_mandatory_argument() { | ||
| 50 | local var_value="$1" | ||
| 51 | local error_message="$2" | ||
| 52 | |||
| 53 | if [ -z "$var_value" ]; then | ||
| 54 | echo "$error_message" | ||
| 55 | help >&2 | ||
| 56 | exit 1 | ||
| 57 | fi | ||
| 58 | } | ||
| 59 | |||
| 60 | validate_mandatory_argument "$SSTATE_DIR" "Please provide the path to the sstate dir." | ||
| 61 | validate_mandatory_argument "$BB_HASHCLIENT" "Please provide the path to bitbake-hashclient." | ||
| 62 | validate_mandatory_argument "$BB_HASHSERVER" "Please provide the address of bitbake-hashserver." | ||
| 63 | } | ||
| 64 | |||
| 65 | # -- main code -- | ||
| 66 | argument_parser "$@" | ||
| 67 | |||
| 68 | # Mark all db sstate hashes | ||
| 69 | find "$SSTATE_DIR" -name "*.tar.zst" | \ | ||
| 70 | sed 's/.*:\([^_]*\)_.*/unihash \1/' | \ | ||
| 71 | $BB_HASHCLIENT --address "$BB_HASHSERVER" gc-mark-stream "$ALIVE_DB_MARK" | ||
| 72 | |||
| 73 | # Remove unmarked and unused entries | ||
| 74 | if [ "$CLEAN_DB" = "true" ]; then | ||
| 75 | $BB_HASHCLIENT --address "$BB_HASHSERVER" gc-sweep "$ALIVE_DB_MARK" | ||
| 76 | $BB_HASHCLIENT --address "$BB_HASHSERVER" clean-unused "$THRESHOLD_AGE" | ||
| 77 | fi | ||
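A hypothetical invocation of the script above, using only the options it defines (paths and server address are invented for illustration):

```
$ ./scripts/clean-hashserver-database \
      --sstate-dir /srv/sstate \
      --hashclient bitbake/bin/bitbake-hashclient \
      --hashserver-address ws://hashserv.example.net:8686 \
      --clean-db
```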
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py index 4e194dee3f..13cf12a33f 100755 --- a/scripts/contrib/convert-spdx-licenses.py +++ b/scripts/contrib/convert-spdx-licenses.py | |||
| @@ -93,7 +93,7 @@ license_map = { | |||
| 93 | "Nauman" : "Naumen", | 93 | "Nauman" : "Naumen", |
| 94 | "tcl" : "TCL", | 94 | "tcl" : "TCL", |
| 95 | "vim" : "Vim", | 95 | "vim" : "Vim", |
| 96 | "SGIv1" : "SGI-1", | 96 | "SGIv1" : "SGI-OpenGL", |
| 97 | } | 97 | } |
| 98 | 98 | ||
| 99 | def processfile(fn): | 99 | def processfile(fn): |
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py new file mode 100755 index 0000000000..5c39df05a5 --- /dev/null +++ b/scripts/contrib/improve_kernel_cve_report.py | |||
| @@ -0,0 +1,473 @@ | |||
| 1 | #! /usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 5 | # This script uses an additional source of CVE information, linux-vulns, | ||
| 6 | # to enrich the cve-summary produced by cve-check or VEX. | ||
| 7 | # It can also use the list of compiled files from the kernel SPDX to ignore | ||
| 8 | # CVEs whose affected files are not compiled in. | ||
| 9 | # | ||
| 10 | # It creates a new json file with updated CVE information | ||
| 11 | # | ||
| 12 | # Compiled files can be extracted by adding the following to local.conf | ||
| 13 | # SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1" | ||
| 14 | # | ||
| 15 | # Tested with the following CVE sources: | ||
| 16 | # - https://git.kernel.org/pub/scm/linux/security/vulns.git | ||
| 17 | # - https://github.com/CVEProject/cvelistV5 | ||
| 18 | # | ||
| 19 | # Example: | ||
| 20 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns | ||
| 21 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json | ||
| 22 | # | ||
| 23 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 24 | |||
| 25 | import argparse | ||
| 26 | import json | ||
| 27 | import sys | ||
| 28 | import logging | ||
| 29 | import glob | ||
| 30 | import os | ||
| 31 | import pathlib | ||
| 32 | from packaging.version import Version | ||
| 33 | |||
| 34 | def is_linux_cve(cve_info): | ||
| 35 | '''Return True if the CVE belongs to Linux''' | ||
| 36 | if "affected" not in cve_info["containers"]["cna"]: | ||
| 37 | return False | ||
| 38 | for affected in cve_info["containers"]["cna"]["affected"]: | ||
| 39 | if "product" not in affected: | ||
| 40 | return False | ||
| 41 | if affected["product"] == "Linux" and affected["vendor"] == "Linux": | ||
| 42 | return True | ||
| 43 | return False | ||
| 44 | |||
| 45 | def get_kernel_cves(datadir, compiled_files, version): | ||
| 46 | """ | ||
| 47 | Get CVEs for the kernel | ||
| 48 | """ | ||
| 49 | cves = {} | ||
| 50 | |||
| 51 | check_config = len(compiled_files) > 0 | ||
| 52 | |||
| 53 | base_version = Version(f"{version.major}.{version.minor}") | ||
| 54 | |||
| 55 | # Check all CVES from kernel vulns | ||
| 56 | pattern = os.path.join(datadir, '**', "CVE-*.json") | ||
| 57 | cve_files = glob.glob(pattern, recursive=True) | ||
| 58 | not_applicable_config = 0 | ||
| 59 | fixed_as_later_backport = 0 | ||
| 60 | vulnerable = 0 | ||
| 61 | not_vulnerable = 0 | ||
| 62 | for cve_file in sorted(cve_files): | ||
| 63 | cve_info = {} | ||
| 64 | with open(cve_file, "r", encoding='ISO-8859-1') as f: | ||
| 65 | cve_info = json.load(f) | ||
| 66 | |||
| 67 | if len(cve_info) == 0: | ||
| 68 | logging.error("No valid data in %s. Aborting", cve_file) | ||
| 69 | break | ||
| 70 | |||
| 71 | if not is_linux_cve(cve_info): | ||
| 72 | continue | ||
| 73 | cve_id = os.path.basename(cve_file)[:-5] | ||
| 74 | description = cve_info["containers"]["cna"]["descriptions"][0]["value"] | ||
| 75 | if cve_file.find("rejected") >= 0: | ||
| 76 | logging.debug("%s is rejected by the CNA", cve_id) | ||
| 77 | cves[cve_id] = { | ||
| 78 | "id": cve_id, | ||
| 79 | "status": "Ignored", | ||
| 80 | "detail": "rejected", | ||
| 81 | "summary": description, | ||
| 82 | "description": f"Rejected by CNA" | ||
| 83 | } | ||
| 84 | continue | ||
| 85 | if any(elem in cve_file for elem in ["review", "reserved", "testing"]): | ||
| 86 | continue | ||
| 87 | |||
| 88 | is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version) | ||
| 89 | |||
| 90 | logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected) | ||
| 91 | |||
| 92 | if is_vulnerable is None: | ||
| 93 | logging.warning("%s doesn't have good metadata", cve_id) | ||
| 94 | if is_vulnerable: | ||
| 95 | is_affected = True | ||
| 96 | affected_files = [] | ||
| 97 | if check_config: | ||
| 98 | is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info) | ||
| 99 | |||
| 100 | if not is_affected and len(affected_files) > 0: | ||
| 101 | logging.debug( | ||
| 102 | "%s - not applicable configuration since affected files not compiled: %s", | ||
| 103 | cve_id, affected_files) | ||
| 104 | cves[cve_id] = { | ||
| 105 | "id": cve_id, | ||
| 106 | "status": "Ignored", | ||
| 107 | "detail": "not-applicable-config", | ||
| 108 | "summary": description, | ||
| 109 | "description": f"Source code not compiled by config. {affected_files}" | ||
| 110 | } | ||
| 111 | not_applicable_config += 1 | ||
| 112 | # Check if we have backport | ||
| 113 | else: | ||
| 114 | if not better_match_last: | ||
| 115 | fixed_in = last_affected | ||
| 116 | else: | ||
| 117 | fixed_in = better_match_last | ||
| 118 | logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in) | ||
| 119 | cves[cve_id] = { | ||
| 120 | "id": cve_id, | ||
| 121 | "status": "Unpatched", | ||
| 122 | "detail": "version-in-range", | ||
| 123 | "summary": description, | ||
| 124 | "description": f"Needs backporting (fixed from {fixed_in})" | ||
| 125 | } | ||
| 126 | vulnerable += 1 | ||
| 127 | if (better_match_last and | ||
| 128 | Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version): | ||
| 129 | fixed_as_later_backport += 1 | ||
| 130 | # Not vulnerable | ||
| 131 | else: | ||
| 132 | if not first_affected: | ||
| 133 | logging.debug("%s - not known affected %s", | ||
| 134 | cve_id, | ||
| 135 | better_match_last) | ||
| 136 | cves[cve_id] = { | ||
| 137 | "id": cve_id, | ||
| 138 | "status": "Patched", | ||
| 139 | "detail": "version-not-in-range", | ||
| 140 | "summary": description, | ||
| 141 | "description": "No CPE match" | ||
| 142 | } | ||
| 143 | not_vulnerable += 1 | ||
| 144 | continue | ||
| 145 | backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}") | ||
| 146 | if version < first_affected: | ||
| 147 | logging.debug('%s - fixed-version: only affects %s onwards', | ||
| 148 | cve_id, | ||
| 149 | first_affected) | ||
| 150 | cves[cve_id] = { | ||
| 151 | "id": cve_id, | ||
| 152 | "status": "Patched", | ||
| 153 | "detail": "fixed-version", | ||
| 154 | "summary": description, | ||
| 155 | "description": f"only affects {first_affected} onwards" | ||
| 156 | } | ||
| 157 | not_vulnerable += 1 | ||
| 158 | elif last_affected <= version: | ||
| 159 | logging.debug("%s - fixed-version: Fixed from version %s", | ||
| 160 | cve_id, | ||
| 161 | last_affected) | ||
| 162 | cves[cve_id] = { | ||
| 163 | "id": cve_id, | ||
| 164 | "status": "Patched", | ||
| 165 | "detail": "fixed-version", | ||
| 166 | "summary": description, | ||
| 167 | "description": f"fixed-version: Fixed from version {last_affected}" | ||
| 168 | } | ||
| 169 | not_vulnerable += 1 | ||
| 170 | elif backport_base == base_version: | ||
| 171 | logging.debug("%s - cpe-stable-backport: Backported in %s", | ||
| 172 | cve_id, | ||
| 173 | better_match_last) | ||
| 174 | cves[cve_id] = { | ||
| 175 | "id": cve_id, | ||
| 176 | "status": "Patched", | ||
| 177 | "detail": "cpe-stable-backport", | ||
| 178 | "summary": description, | ||
| 179 | "description": f"Backported in {better_match_last}" | ||
| 180 | } | ||
| 181 | not_vulnerable += 1 | ||
| 182 | else: | ||
| 183 | logging.debug("%s - version not affected %s", cve_id, str(affected_versions)) | ||
| 184 | cves[cve_id] = { | ||
| 185 | "id": cve_id, | ||
| 186 | "status": "Patched", | ||
| 187 | "detail": "version-not-in-range", | ||
| 188 | "summary": description, | ||
| 189 | "description": f"Range {affected_versions}" | ||
| 190 | } | ||
| 191 | not_vulnerable += 1 | ||
| 192 | |||
| 193 | logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config) | ||
| 194 | logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable) | ||
| 195 | logging.info("Total vulnerable CVEs: %d", vulnerable) | ||
| 196 | |||
| 197 | logging.info("Total CVEs already backported in %s: %s", base_version, | ||
| 198 | fixed_as_later_backport) | ||
| 199 | return cves | ||
| 200 | |||
| 201 | def read_spdx(spdx_file): | ||
| 202 | '''Open SPDX file and extract compiled files''' | ||
| 203 | with open(spdx_file, 'r', encoding='ISO-8859-1') as f: | ||
| 204 | spdx = json.load(f) | ||
| 205 | if "spdxVersion" in spdx: | ||
| 206 | if spdx["spdxVersion"] == "SPDX-2.2": | ||
| 207 | return read_spdx2(spdx) | ||
| 208 | if "@graph" in spdx: | ||
| 209 | return read_spdx3(spdx) | ||
| 210 | return [] | ||
| 211 | |||
| 212 | def read_spdx2(spdx): | ||
| 213 | ''' | ||
| 214 | Read spdx2 compiled files from spdx | ||
| 215 | ''' | ||
| 216 | cfiles = set() | ||
| 217 | if 'files' not in spdx: | ||
| 218 | return cfiles | ||
| 219 | for item in spdx['files']: | ||
| 220 | for ftype in item['fileTypes']: | ||
| 221 | if ftype == "SOURCE": | ||
| 222 | filename = item["fileName"][item["fileName"].find("/")+1:] | ||
| 223 | cfiles.add(filename) | ||
| 224 | return cfiles | ||
| 225 | |||
| 226 | def read_spdx3(spdx): | ||
| 227 | ''' | ||
| 228 | Read spdx3 compiled files from spdx | ||
| 229 | ''' | ||
| 230 | cfiles = set() | ||
| 231 | for item in spdx["@graph"]: | ||
| 232 | if "software_primaryPurpose" not in item: | ||
| 233 | continue | ||
| 234 | if item["software_primaryPurpose"] == "source": | ||
| 235 | filename = item['name'][item['name'].find("/")+1:] | ||
| 236 | cfiles.add(filename) | ||
| 237 | return cfiles | ||
| 238 | |||
| 239 | def check_kernel_compiled_files(compiled_files, cve_info): | ||
| 240 | """ | ||
| 241 | Return if a CVE affected us depending on compiled files | ||
| 242 | """ | ||
| 243 | files_affected = set() | ||
| 244 | is_affected = False | ||
| 245 | |||
| 246 | for item in cve_info['containers']['cna']['affected']: | ||
| 247 | if "programFiles" in item: | ||
| 248 | for f in item['programFiles']: | ||
| 249 | if f not in files_affected: | ||
| 250 | files_affected.add(f) | ||
| 251 | |||
| 252 | if len(files_affected) > 0: | ||
| 253 | for f in files_affected: | ||
| 254 | if f in compiled_files: | ||
| 255 | logging.debug("File match: %s", f) | ||
| 256 | is_affected = True | ||
| 257 | return is_affected, files_affected | ||
| 258 | |||
| 259 | def get_cpe_applicability(cve_info, v): | ||
| 260 | ''' | ||
| 261 | Check if version is affected and return affected versions | ||
| 262 | ''' | ||
| 263 | base_branch = Version(f"{v.major}.{v.minor}") | ||
| 264 | affected = [] | ||
| 265 | if 'cpeApplicability' not in cve_info["containers"]["cna"]: | ||
| 266 | return None, None, None, None, None, None | ||
| 267 | |||
| 268 | for nodes in cve_info["containers"]["cna"]["cpeApplicability"]: | ||
| 269 | for node in nodes.values(): | ||
| 270 | vulnerable = False | ||
| 271 | matched_branch = False | ||
| 272 | first_affected = Version("5000") | ||
| 273 | last_affected = Version("0") | ||
| 274 | better_match_first = Version("0") | ||
| 275 | better_match_last = Version("5000") | ||
| 276 | |||
| 277 | if len(node[0]['cpeMatch']) == 0: | ||
| 278 | first_affected = None | ||
| 279 | last_affected = None | ||
| 280 | better_match_first = None | ||
| 281 | better_match_last = None | ||
| 282 | |||
| 283 | for cpe_match in node[0]['cpeMatch']: | ||
| 284 | version_start_including = Version("0") | ||
| 285 | version_end_excluding = Version("0") | ||
| 286 | if 'versionStartIncluding' in cpe_match: | ||
| 287 | version_start_including = Version(cpe_match['versionStartIncluding']) | ||
| 288 | else: | ||
| 289 | version_start_including = Version("0") | ||
| 290 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
| 291 | if "versionEndExcluding" in cpe_match: | ||
| 292 | version_end_excluding = Version(cpe_match["versionEndExcluding"]) | ||
| 293 | else: | ||
| 294 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
| 295 | version_end_excluding = Version( | ||
| 296 | f"{version_start_including.major}.{version_start_including.minor}.5000" | ||
| 297 | ) | ||
| 298 | affected.append(f" {version_start_including}-{version_end_excluding}") | ||
| 299 | # Detect if versionEnd is in fixed in base branch. It has precedence over the rest | ||
| 300 | branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}") | ||
| 301 | if branch_end == base_branch: | ||
| 302 | if version_start_including <= v < version_end_excluding: | ||
| 303 | vulnerable = cpe_match['vulnerable'] | ||
| 304 | # If we don't match in our branch, we are not vulnerable, | ||
| 305 | # since we have a backport | ||
| 306 | matched_branch = True | ||
| 307 | better_match_first = version_start_including | ||
| 308 | better_match_last = version_end_excluding | ||
| 309 | if version_start_including <= v < version_end_excluding and not matched_branch: | ||
| 310 | if version_end_excluding < better_match_last: | ||
| 311 | better_match_first = max(version_start_including, better_match_first) | ||
| 312 | better_match_last = min(better_match_last, version_end_excluding) | ||
| 313 | vulnerable = cpe_match['vulnerable'] | ||
| 314 | matched_branch = True | ||
| 315 | |||
| 316 | first_affected = min(version_start_including, first_affected) | ||
| 317 | last_affected = max(version_end_excluding, last_affected) | ||
| 318 | # Not a better match, we use the first and last affected instead of the fake .5000 | ||
| 319 | if vulnerable and better_match_last == Version(f"{base_branch}.5000"): | ||
| 320 | better_match_last = last_affected | ||
| 321 | better_match_first = first_affected | ||
| 322 | return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected | ||
| 323 | |||
| 324 | def copy_data(old, new): | ||
| 325 | '''Update dictionary with new entries, while keeping the old ones''' | ||
| 326 | for k in new.keys(): | ||
| 327 | old[k] = new[k] | ||
| 328 | return old | ||
| 329 | |||
| 330 | # Function taken from cve_check.bbclass. Adapted to cve fields | ||
| 331 | def cve_update(cve_data, cve, entry): | ||
| 332 | # If no entry, just add it | ||
| 333 | if cve not in cve_data: | ||
| 334 | cve_data[cve] = entry | ||
| 335 | return | ||
| 336 | # If we are updating, there might be change in the status | ||
| 337 | if cve_data[cve]['status'] == "Unknown": | ||
| 338 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 339 | return | ||
| 340 | if cve_data[cve]['status'] == entry['status']: | ||
| 341 | return | ||
| 342 | if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched": | ||
| 343 | # Backported-patch (e.g. vendor kernel repo with cherry-picked CVE patch) | ||
| 344 | # has priority over Unpatched from the CNA | ||
| 345 | if cve_data[cve]['detail'] == "backported-patch": | ||
| 346 | return | ||
| 347 | logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve) | ||
| 348 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 349 | return | ||
| 350 | if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched": | ||
| 351 | logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve) | ||
| 352 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 353 | return | ||
| 354 | # If we have an "Ignored", it has a priority | ||
| 355 | if cve_data[cve]['status'] == "Ignored": | ||
| 356 | logging.debug("CVE %s not updating because Ignored", cve) | ||
| 357 | return | ||
| 358 | # An incoming "Ignored" entry also takes priority | ||
| 359 | if entry['status'] == "Ignored": | ||
| 360 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
| 361 | logging.debug("CVE entry %s updated from Unpatched to Ignored", cve) | ||
| 362 | return | ||
| 363 | logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s", | ||
| 364 | cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail']) | ||
| 365 | |||
| 366 | def main(): | ||
| 367 | parser = argparse.ArgumentParser( | ||
| 368 | description="Update cve-summary with kernel compiled files and kernel CVE information" | ||
| 369 | ) | ||
| 370 | parser.add_argument( | ||
| 371 | "-s", | ||
| 372 | "--spdx", | ||
| 373 | help="SPDX2/3 for the kernel. Needs to include compiled sources", | ||
| 374 | ) | ||
| 375 | parser.add_argument( | ||
| 376 | "--datadir", | ||
| 377 | type=pathlib.Path, | ||
| 378 | help="Directory where CVE data is", | ||
| 379 | required=True | ||
| 380 | ) | ||
| 381 | parser.add_argument( | ||
| 382 | "--old-cve-report", | ||
| 383 | help="CVE report to update. (Optional)", | ||
| 384 | ) | ||
| 385 | parser.add_argument( | ||
| 386 | "--kernel-version", | ||
| 387 | help="Kernel version. Needed if old cve_report is not provided (Optional)", | ||
| 388 | type=Version | ||
| 389 | ) | ||
| 390 | parser.add_argument( | ||
| 391 | "--new-cve-report", | ||
| 392 | help="Output file", | ||
| 393 | default="cve-summary-enhance.json" | ||
| 394 | ) | ||
| 395 | parser.add_argument( | ||
| 396 | "-D", | ||
| 397 | "--debug", | ||
| 398 | help='Enable debug output', | ||
| 399 | action="store_true") | ||
| 400 | |||
| 401 | args = parser.parse_args() | ||
| 402 | |||
| 403 | if args.debug: | ||
| 404 | log_level=logging.DEBUG | ||
| 405 | else: | ||
| 406 | log_level=logging.INFO | ||
| 407 | logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level) | ||
| 408 | |||
| 409 | if not args.kernel_version and not args.old_cve_report: | ||
| 410 | parser.error("either --kernel-version or --old-cve-report is needed") | ||
| 411 | return -1 | ||
| 412 | |||
| 413 | # by default we don't check the compiled files, unless provided | ||
| 414 | compiled_files = [] | ||
| 415 | if args.spdx: | ||
| 416 | compiled_files = read_spdx(args.spdx) | ||
| 417 | logging.info("Total compiled files %d", len(compiled_files)) | ||
| 418 | |||
| 419 | if args.old_cve_report: | ||
| 420 | with open(args.old_cve_report, encoding='ISO-8859-1') as f: | ||
| 421 | cve_report = json.load(f) | ||
| 422 | else: | ||
| 423 | # If a summary is not provided, create one | ||
| 424 | cve_report = { | ||
| 425 | "version": "1", | ||
| 426 | "package": [ | ||
| 427 | { | ||
| 428 | "name": "linux-yocto", | ||
| 429 | "version": str(args.kernel_version), | ||
| 430 | "products": [ | ||
| 431 | { | ||
| 432 | "product": "linux_kernel", | ||
| 433 | "cvesInRecord": "Yes" | ||
| 434 | } | ||
| 435 | ], | ||
| 436 | "issue": [] | ||
| 437 | } | ||
| 438 | ] | ||
| 439 | } | ||
| 440 | |||
| 441 | for pkg in cve_report['package']: | ||
| 442 | is_kernel = False | ||
| 443 | for product in pkg['products']: | ||
| 444 | if product['product'] == "linux_kernel": | ||
| 445 | is_kernel=True | ||
| 446 | if not is_kernel: | ||
| 447 | continue | ||
| 448 | # We remove custom versions after - | ||
| 449 | upstream_version = Version(pkg["version"].split("-")[0]) | ||
| 450 | logging.info("Checking kernel %s", upstream_version) | ||
| 451 | kernel_cves = get_kernel_cves(args.datadir, | ||
| 452 | compiled_files, | ||
| 453 | upstream_version) | ||
| 454 | logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves)) | ||
| 455 | cves = {issue["id"]: issue for issue in pkg["issue"]} | ||
| 456 | logging.info("Total kernel before processing cves: %s", len(cves)) | ||
| 457 | |||
| 458 | for cve in kernel_cves: | ||
| 459 | cve_update(cves, cve, kernel_cves[cve]) | ||
| 460 | |||
| 461 | pkg["issue"] = [] | ||
| 462 | for cve in sorted(cves): | ||
| 463 | pkg["issue"].extend([cves[cve]]) | ||
| 464 | logging.info("Total kernel cves after processing: %s", len(pkg['issue'])) | ||
| 465 | |||
| 466 | with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f: | ||
| 467 | json.dump(cve_report, f, indent=2) | ||
| 468 | |||
| 469 | return 0 | ||
| 470 | |||
| 471 | if __name__ == "__main__": | ||
| 472 | sys.exit(main()) | ||
| 473 | |||
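To make the range logic in get_cpe_applicability() concrete, here is a fabricated cpeApplicability fragment in the shape the function reads, and the result a 6.12.27 kernel would get (all CVE data invented for illustration):

```python
from packaging.version import Version

# Invented CVE metadata: a single cpeMatch marking [5.4, 6.12.30) vulnerable.
cve_info = {"containers": {"cna": {"cpeApplicability": [
    {"nodes": [{"cpeMatch": [{
        "vulnerable": True,
        "versionStartIncluding": "5.4",
        "versionEndExcluding": "6.12.30",
    }]}]}
]}}}

# 6.12.27 falls inside the range and the fix lands in the same 6.12 stable
# branch, so the kernel is reported vulnerable until 6.12.30:
# get_cpe_applicability(cve_info, Version("6.12.27"))
# -> (True, Version('5.4'), Version('6.12.30'),
#     Version('5.4'), Version('6.12.30'), [' 5.4-6.12.30'])
```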
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh new file mode 100755 index 0000000000..31caaf339d --- /dev/null +++ b/scripts/contrib/make-spdx-bindings.sh | |||
| @@ -0,0 +1,12 @@ | |||
| 1 | #! /bin/sh | ||
| 2 | # | ||
| 3 | # SPDX-License-Identifier: MIT | ||
| 4 | |||
| 5 | THIS_DIR="$(dirname "$0")" | ||
| 6 | |||
| 7 | VERSION="3.0.1" | ||
| 8 | |||
| 9 | shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \ | ||
| 10 | --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \ | ||
| 11 | --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \ | ||
| 12 | python -o $THIS_DIR/../../meta/lib/oe/spdx30.py | ||
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore new file mode 100644 index 0000000000..285851c984 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/.gitignore | |||
| @@ -0,0 +1,8 @@ | |||
| 1 | *.spdx.json | ||
| 2 | *.pyc | ||
| 3 | *.bak | ||
| 4 | *.swp | ||
| 5 | *.swo | ||
| 6 | *.swn | ||
| 7 | venv/* | ||
| 8 | .venv/* | ||
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md new file mode 100644 index 0000000000..44f76eacd8 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/README.md | |||
| @@ -0,0 +1,24 @@ | |||
| 1 | # OE Image Files from SBoM | ||
| 2 | |||
| 3 | This is an example python script that will list the packaged files with their | ||
| 4 | checksums based on the SPDX 3.0.1 SBoM. | ||
| 5 | |||
| 6 | It can be used as a template for other programs that investigate output based on | ||
| 7 | OE SPDX SBoMs. | ||
| 8 | |||
| 9 | ## Installation | ||
| 10 | |||
| 11 | This project can be installed using a virtual environment: | ||
| 12 | ``` | ||
| 13 | python3 -m venv .venv | ||
| 14 | . .venv/bin/activate | ||
| 15 | python3 -m pip install -e '.[dev]' | ||
| 16 | ``` | ||
| 17 | |||
| 18 | ## Usage | ||
| 19 | |||
| 20 | After installing, the `oe-image-files` program can be used to show the files, e.g.: | ||
| 21 | |||
| 22 | ``` | ||
| 23 | oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json | ||
| 24 | ``` | ||
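The output is one line per packaged file in `name - sha256` form (see main.py below); for example, with invented file names and digests:

```
./bin/busybox - 4f8b0c8f1a9e...
./etc/hostname - 9d2a6b3c1e07...
```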
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml new file mode 100644 index 0000000000..3fab5dd605 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml | |||
| @@ -0,0 +1,23 @@ | |||
| 1 | [project] | ||
| 2 | name = "oe-image-files" | ||
| 3 | description = "Displays all packaged files on the root file system" | ||
| 4 | dynamic = ["version"] | ||
| 5 | requires-python = ">= 3.8" | ||
| 6 | readme = "README.md" | ||
| 7 | |||
| 8 | dependencies = [ | ||
| 9 | "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179", | ||
| 10 | ] | ||
| 11 | |||
| 12 | [project.scripts] | ||
| 13 | oe-image-files = "oe_image_files:main" | ||
| 14 | |||
| 15 | [build-system] | ||
| 16 | requires = ["hatchling"] | ||
| 17 | build-backend = "hatchling.build" | ||
| 18 | |||
| 19 | [tool.hatch.version] | ||
| 20 | path = "src/oe_image_files/version.py" | ||
| 21 | |||
| 22 | [tool.hatch.metadata] | ||
| 23 | allow-direct-references = true | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py new file mode 100644 index 0000000000..c28a133f2d --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py | |||
| @@ -0,0 +1 @@ | |||
| from .main import main | |||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py new file mode 100644 index 0000000000..8476bf6369 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py | |||
| @@ -0,0 +1,86 @@ | |||
| 1 | # SPDX-License-Identifier: MIT | ||
| 2 | |||
| 3 | import argparse | ||
| 4 | from pathlib import Path | ||
| 5 | |||
| 6 | |||
| 7 | from spdx_python_model import v3_0_1 as spdx_3_0_1 | ||
| 8 | from .version import VERSION | ||
| 9 | |||
| 10 | |||
| 11 | def main(): | ||
| 12 | parser = argparse.ArgumentParser( | ||
| 13 | description="Show the packaged files and checksums in an OE image from the SPDX SBoM" | ||
| 14 | ) | ||
| 15 | parser.add_argument("file", help="SPDX 3 input file", type=Path) | ||
| 16 | parser.add_argument("--version", "-V", action="version", version=VERSION) | ||
| 17 | |||
| 18 | args = parser.parse_args() | ||
| 19 | |||
| 20 | # Load SPDX data from file into a new object set | ||
| 21 | objset = spdx_3_0_1.SHACLObjectSet() | ||
| 22 | with args.file.open("r") as f: | ||
| 23 | d = spdx_3_0_1.JSONLDDeserializer() | ||
| 24 | d.read(f, objset) | ||
| 25 | |||
| 26 | # Find the top level SPDX Document object | ||
| 27 | for o in objset.foreach_type(spdx_3_0_1.SpdxDocument): | ||
| 28 | doc = o | ||
| 29 | break | ||
| 30 | else: | ||
| 31 | print("ERROR: No SPDX Document found!") | ||
| 32 | return 1 | ||
| 33 | |||
| 34 | # Find the root SBoM in the document | ||
| 35 | for o in doc.rootElement: | ||
| 36 | if isinstance(o, spdx_3_0_1.software_Sbom): | ||
| 37 | sbom = o | ||
| 38 | break | ||
| 39 | else: | ||
| 40 | print("ERROR: SBoM not found in document") | ||
| 41 | return 1 | ||
| 42 | |||
| 43 | # Find the root file system package in the SBoM | ||
| 44 | for o in sbom.rootElement: | ||
| 45 | if ( | ||
| 46 | isinstance(o, spdx_3_0_1.software_Package) | ||
| 47 | and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive | ||
| 48 | ): | ||
| 49 | root_package = o | ||
| 50 | break | ||
| 51 | else: | ||
| 52 | print("ERROR: Package not found in document") | ||
| 53 | return 1 | ||
| 54 | |||
| 55 | # Find all relationships of type "contains" that go FROM the root file | ||
| 56 | # system | ||
| 57 | files = [] | ||
| 58 | for rel in objset.foreach_type(spdx_3_0_1.Relationship): | ||
| 59 | if rel.relationshipType != spdx_3_0_1.RelationshipType.contains: | ||
| 60 | continue | ||
| 61 | |||
| 62 | if rel.from_ is not root_package: | ||
| 63 | continue | ||
| 64 | |||
| 65 | # Iterate over all files in the TO of the relationship | ||
| 66 | for o in rel.to: | ||
| 67 | if not isinstance(o, spdx_3_0_1.software_File): | ||
| 68 | continue | ||
| 69 | |||
| 70 | # Find the SHA-256 hash of the file (if any) | ||
| 71 | for h in o.verifiedUsing: | ||
| 72 | if ( | ||
| 73 | isinstance(h, spdx_3_0_1.Hash) | ||
| 74 | and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256 | ||
| 75 | ): | ||
| 76 | files.append((o.name, h.hashValue)) | ||
| 77 | break | ||
| 78 | else: | ||
| 79 | files.append((o.name, "")) | ||
| 80 | |||
| 81 | # Print files | ||
| 82 | files.sort(key=lambda x: x[0]) | ||
| 83 | for name, hash_val in files: | ||
| 84 | print(f"{name} - {hash_val}") | ||
| 85 | |||
| 86 | return 0 | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py new file mode 100644 index 0000000000..901e5110b2 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py | |||
| @@ -0,0 +1 @@ | |||
| VERSION = "0.0.1" | |||
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py index bceae06561..d8d7b214e5 100755 --- a/scripts/contrib/patchreview.py +++ b/scripts/contrib/patchreview.py | |||
| @@ -14,6 +14,10 @@ import pathlib | |||
| 14 | import re | 14 | import re |
| 15 | import subprocess | 15 | import subprocess |
| 16 | 16 | ||
| 17 | import sys | ||
| 18 | sys.path.append(os.path.join(sys.path[0], '../../meta/lib')) | ||
| 19 | import oe.qa | ||
| 20 | |||
| 17 | # TODO | 21 | # TODO |
| 18 | # - option to just list all broken files | 22 | # - option to just list all broken files |
| 19 | # - test suite | 23 | # - test suite |
| @@ -47,7 +51,7 @@ def blame_patch(patch): | |||
| 47 | return subprocess.check_output(("git", "log", | 51 | return subprocess.check_output(("git", "log", |
| 48 | "--follow", "--find-renames", "--diff-filter=A", | 52 | "--follow", "--find-renames", "--diff-filter=A", |
| 49 | "--format=%s (%aN <%aE>)", | 53 | "--format=%s (%aN <%aE>)", |
| 50 | "--", patch)).decode("utf-8").splitlines() | 54 | "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines() |
| 51 | 55 | ||
| 52 | def patchreview(patches): | 56 | def patchreview(patches): |
| 53 | 57 | ||
| @@ -78,12 +82,11 @@ def patchreview(patches): | |||
| 78 | else: | 82 | else: |
| 79 | result.missing_sob = True | 83 | result.missing_sob = True |
| 80 | 84 | ||
| 81 | |||
| 82 | # Find the Upstream-Status tag | 85 | # Find the Upstream-Status tag |
| 83 | match = status_re.search(content) | 86 | match = status_re.search(content) |
| 84 | if match: | 87 | if match: |
| 85 | value = match.group(1) | 88 | value = oe.qa.check_upstream_status(patch) |
| 86 | if value != "Upstream-Status:": | 89 | if value: |
| 87 | result.malformed_upstream_status = value | 90 | result.malformed_upstream_status = value |
| 88 | 91 | ||
| 89 | value = match.group(2).lower() | 92 | value = match.group(2).lower() |
diff --git a/scripts/cve-json-to-text.py b/scripts/cve-json-to-text.py new file mode 100755 index 0000000000..8d309b37e5 --- /dev/null +++ b/scripts/cve-json-to-text.py | |||
| @@ -0,0 +1,146 @@ | |||
| 1 | #!/usr/bin/env python3 | ||
| 2 | # SPDX-FileCopyrightText: OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | |||
| 6 | # CVE results conversion script: JSON format to text | ||
| 7 | # Derived from cve-report.py from Oniro (MIT, by Huawei Inc) | ||
| 8 | |||
| 9 | import sys | ||
| 10 | import getopt | ||
| 11 | |||
| 12 | infile = "in.json" | ||
| 13 | outfile = "out.txt" | ||
| 14 | |||
| 15 | |||
| 16 | def show_syntax_and_exit(code): | ||
| 17 | """ | ||
| 18 | Show the program syntax and exit with an error | ||
| 19 | Arguments: | ||
| 20 | code: the error code to return | ||
| 21 | """ | ||
| 22 | print("Syntax: %s [-h] [-i inputJSONfile][-o outputfile]" % sys.argv[0]) | ||
| 23 | sys.exit(code) | ||
| 24 | |||
| 25 | |||
| 26 | def exit_error(code, message): | ||
| 27 | """ | ||
| 28 | Show the error message and exit with an error | ||
| 29 | Arguments: | ||
| 30 | code: the error code to return | ||
| 31 | message: the message to show | ||
| 32 | """ | ||
| 33 | print("Error: %s" % message) | ||
| 34 | sys.exit(code) | ||
| 35 | |||
| 36 | |||
| 37 | def parse_args(argv): | ||
| 38 | """ | ||
| 39 | Parse the program arguments, put options in global variables | ||
| 40 | Arguments: | ||
| 41 | argv: program arguments | ||
| 42 | """ | ||
| 43 | global infile, outfile | ||
| 44 | try: | ||
| 45 | opts, args = getopt.getopt( | ||
| 46 | argv, "hi:o:", ["help", "input", "output"] | ||
| 47 | ) | ||
| 48 | except getopt.GetoptError: | ||
| 49 | show_syntax_and_exit(1) | ||
| 50 | for opt, arg in opts: | ||
| 51 | if opt in ("-h"): | ||
| 52 | show_syntax_and_exit(0) | ||
| 53 | elif opt in ("-i"): | ||
| 54 | infile = arg | ||
| 55 | elif opt in ("-o"): | ||
| 56 | outfile = arg | ||
| 57 | |||
| 58 | def load_json(filename): | ||
| 59 | """ | ||
| 60 | Load the JSON file, return the resulting dictionary | ||
| 61 | Arguments: | ||
| 62 | filename: the file to open | ||
| 63 | Returns: | ||
| 64 | Parsed file as a dictionary | ||
| 65 | """ | ||
| 66 | import json | ||
| 67 | |||
| 68 | out = {} | ||
| 69 | try: | ||
| 70 | with open(filename, "r") as f: | ||
| 71 | out = json.load(f) | ||
| 72 | except FileNotFoundError: | ||
| 73 | exit_error(1, "Input file (%s) not found" % (filename)) | ||
| 74 | except json.decoder.JSONDecodeError as error: | ||
| 75 | exit_error(1, "Malformed JSON file: %s" % str(error)) | ||
| 76 | return out | ||
| 77 | |||
| 78 | |||
| 79 | def process_data(filename, data): | ||
| 80 | """ | ||
| 81 | Write the resulting text report, one block per package issue | ||
| 82 | Arguments: | ||
| 83 | filename: the file to write to | ||
| 84 | data: dictionary from parsing the JSON file | ||
| 85 | Returns: | ||
| 86 | None | ||
| 87 | """ | ||
| 88 | if not "version" in data or data["version"] != "1": | ||
| 89 | exit_error(1, "Unrecognized format version number") | ||
| 90 | if not "package" in data: | ||
| 91 | exit_error(1, "Mandatory 'package' key not found") | ||
| 92 | |||
| 93 | lines = "" | ||
| 94 | total_issue_count = 0 | ||
| 95 | for package in data["package"]: | ||
| 96 | package_info = "" | ||
| 97 | keys_in_package = {"name", "layer", "version", "issue"} | ||
| 98 | if keys_in_package - package.keys(): | ||
| 99 | exit_error( | ||
| 100 | 1, | ||
| 101 | "Missing a mandatory key in package: %s" | ||
| 102 | % (keys_in_package - package.keys()), | ||
| 103 | ) | ||
| 104 | |||
| 105 | package_info += "LAYER: %s\n" % package["layer"] | ||
| 106 | package_info += "PACKAGE NAME: %s\n" % package["name"] | ||
| 107 | package_info += "PACKAGE VERSION: %s\n" % package["version"] | ||
| 108 | |||
| 109 | for issue in package["issue"]: | ||
| 110 | keys_in_issue = {"id", "status", "detail"} | ||
| 111 | if keys_in_issue - issue.keys(): | ||
| 112 | print("Warning: Missing keys %s in 'issue' for the package '%s'" | ||
| 113 | % (keys_in_issue - issue.keys(), package["name"])) | ||
| 114 | |||
| 115 | lines += package_info | ||
| 116 | lines += "CVE: %s\n" % issue["id"] | ||
| 117 | lines += "CVE STATUS: %s\n" % issue["status"] | ||
| 118 | lines += "CVE DETAIL: %s\n" % issue["detail"] | ||
| 119 | if "description" in issue: | ||
| 120 | lines += "CVE DESCRIPTION: %s\n" % issue["description"] | ||
| 121 | if "summary" in issue: | ||
| 122 | lines += "CVE SUMMARY: %s\n" % issue["summary"] | ||
| 123 | if "scorev2" in issue: | ||
| 124 | lines += "CVSS v2 BASE SCORE: %s\n" % issue["scorev2"] | ||
| 125 | if "scorev3" in issue: | ||
| 126 | lines += "CVSS v3 BASE SCORE: %s\n" % issue["scorev3"] | ||
| 127 | if "scorev4" in issue: | ||
| 128 | lines += "CVSS v4 BASE SCORE: %s\n" % issue["scorev4"] | ||
| 129 | if "vector" in issue: | ||
| 130 | lines += "VECTOR: %s\n" % issue["vector"] | ||
| 131 | if "vectorString" in issue: | ||
| 132 | lines += "VECTORSTRING: %s\n" % issue["vectorString"] | ||
| 133 | lines += "MORE INFORMATION: https://nvd.nist.gov/vuln/detail/%s\n" % issue["id"] | ||
| 134 | lines += "\n" | ||
| 135 | |||
| 136 | with open(filename, "w") as f: | ||
| 137 | f.write(lines) | ||
| 138 | |||
| 139 | def main(argv): | ||
| 140 | parse_args(argv) | ||
| 141 | data = load_json(infile) | ||
| 142 | process_data(outfile, data) | ||
| 143 | |||
| 144 | |||
| 145 | if __name__ == "__main__": | ||
| 146 | main(sys.argv[1:]) | ||
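A usage sketch for the converter above, taking the cve-check summary path used elsewhere in this series (output file name is arbitrary):

```
$ scripts/cve-json-to-text.py -i build/tmp/log/cve/cve-summary.json -o cve-summary.txt
```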
diff --git a/scripts/devtool b/scripts/devtool index 60ea3e8298..39cebec0d8 100755 --- a/scripts/devtool +++ b/scripts/devtool | |||
| @@ -7,19 +7,17 @@ | |||
| 7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
| 8 | # | 8 | # |
| 9 | 9 | ||
| 10 | import dataclasses | ||
| 10 | import sys | 11 | import sys |
| 11 | import os | 12 | import os |
| 12 | import argparse | 13 | import argparse |
| 13 | import glob | 14 | import glob |
| 14 | import re | 15 | import re |
| 15 | import configparser | 16 | import configparser |
| 16 | import subprocess | ||
| 17 | import logging | 17 | import logging |
| 18 | 18 | ||
| 19 | basepath = '' | 19 | # This can be removed once our minimum is Python 3.9: https://docs.python.org/3/whatsnew/3.9.html#type-hinting-generics-in-standard-collections |
| 20 | workspace = {} | 20 | from typing import List |
| 21 | config = None | ||
| 22 | context = None | ||
| 23 | 21 | ||
| 24 | 22 | ||
| 25 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | 23 | scripts_path = os.path.dirname(os.path.realpath(__file__)) |
| @@ -30,16 +28,16 @@ import scriptutils | |||
| 30 | import argparse_oe | 28 | import argparse_oe |
| 31 | logger = scriptutils.logger_create('devtool') | 29 | logger = scriptutils.logger_create('devtool') |
| 32 | 30 | ||
| 33 | plugins = [] | ||
| 34 | 31 | ||
| 35 | 32 | class ConfigHandler: | |
| 36 | class ConfigHandler(object): | 33 | basepath = None |
| 37 | config_file = '' | 34 | config_file = '' |
| 38 | config_obj = None | 35 | config_obj = None |
| 39 | init_path = '' | 36 | init_path = '' |
| 40 | workspace_path = '' | 37 | workspace_path = '' |
| 41 | 38 | ||
| 42 | def __init__(self, filename): | 39 | def __init__(self, basepath, filename): |
| 40 | self.basepath = basepath | ||
| 43 | self.config_file = filename | 41 | self.config_file = filename |
| 44 | self.config_obj = configparser.ConfigParser() | 42 | self.config_obj = configparser.ConfigParser() |
| 45 | 43 | ||
| @@ -47,7 +45,7 @@ class ConfigHandler(object): | |||
| 47 | try: | 45 | try: |
| 48 | ret = self.config_obj.get(section, option) | 46 | ret = self.config_obj.get(section, option) |
| 49 | except (configparser.NoOptionError, configparser.NoSectionError): | 47 | except (configparser.NoOptionError, configparser.NoSectionError): |
| 50 | if default != None: | 48 | if default is not None: |
| 51 | ret = default | 49 | ret = default |
| 52 | else: | 50 | else: |
| 53 | raise | 51 | raise |
| @@ -59,14 +57,14 @@ class ConfigHandler(object): | |||
| 59 | 57 | ||
| 60 | if self.config_obj.has_option('General', 'init_path'): | 58 | if self.config_obj.has_option('General', 'init_path'): |
| 61 | pth = self.get('General', 'init_path') | 59 | pth = self.get('General', 'init_path') |
| 62 | self.init_path = os.path.join(basepath, pth) | 60 | self.init_path = os.path.join(self.basepath, pth) |
| 63 | if not os.path.exists(self.init_path): | 61 | if not os.path.exists(self.init_path): |
| 64 | logger.error('init_path %s specified in config file cannot be found' % pth) | 62 | logger.error('init_path %s specified in config file cannot be found' % pth) |
| 65 | return False | 63 | return False |
| 66 | else: | 64 | else: |
| 67 | self.config_obj.add_section('General') | 65 | self.config_obj.add_section('General') |
| 68 | 66 | ||
| 69 | self.workspace_path = self.get('General', 'workspace_path', os.path.join(basepath, 'workspace')) | 67 | self.workspace_path = self.get('General', 'workspace_path', os.path.join(self.basepath, 'workspace')) |
| 70 | return True | 68 | return True |
| 71 | 69 | ||
| 72 | 70 | ||
| @@ -81,27 +79,29 @@ class ConfigHandler(object): | |||
| 81 | self.config_obj.add_section(section) | 79 | self.config_obj.add_section(section) |
| 82 | self.config_obj.set(section, option, value) | 80 | self.config_obj.set(section, option, value) |
| 83 | 81 | ||
| 82 | |||
| 83 | @dataclasses.dataclass | ||
| 84 | class Context: | 84 | class Context: |
| 85 | def __init__(self, **kwargs): | 85 | fixed_setup: bool |
| 86 | self.__dict__.update(kwargs) | 86 | config: ConfigHandler |
| 87 | pluginpaths: List[str] | ||
| 87 | 88 | ||
| 88 | 89 | ||
| 89 | def read_workspace(): | 90 | def read_workspace(basepath, context): |
| 90 | global workspace | ||
| 91 | workspace = {} | 91 | workspace = {} |
| 92 | if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')): | 92 | if not os.path.exists(os.path.join(context.config.workspace_path, 'conf', 'layer.conf')): |
| 93 | if context.fixed_setup: | 93 | if context.fixed_setup: |
| 94 | logger.error("workspace layer not set up") | 94 | logger.error("workspace layer not set up") |
| 95 | sys.exit(1) | 95 | sys.exit(1) |
| 96 | else: | 96 | else: |
| 97 | logger.info('Creating workspace layer in %s' % config.workspace_path) | 97 | logger.info('Creating workspace layer in %s' % context.config.workspace_path) |
| 98 | _create_workspace(config.workspace_path, config, basepath) | 98 | _create_workspace(context.config.workspace_path, basepath) |
| 99 | if not context.fixed_setup: | 99 | if not context.fixed_setup: |
| 100 | _enable_workspace_layer(config.workspace_path, config, basepath) | 100 | _enable_workspace_layer(context.config.workspace_path, context.config, basepath) |
| 101 | 101 | ||
| 102 | logger.debug('Reading workspace in %s' % config.workspace_path) | 102 | logger.debug('Reading workspace in %s' % context.config.workspace_path) |
| 103 | externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$') | 103 | externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$') |
| 104 | for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')): | 104 | for fn in glob.glob(os.path.join(context.config.workspace_path, 'appends', '*.bbappend')): |
| 105 | with open(fn, 'r') as f: | 105 | with open(fn, 'r') as f: |
| 106 | pnvalues = {} | 106 | pnvalues = {} |
| 107 | pn = None | 107 | pn = None |
| @@ -112,7 +112,7 @@ def read_workspace(): | |||
| 112 | pn = res.group(2) or recipepn | 112 | pn = res.group(2) or recipepn |
| 113 | # Find the recipe file within the workspace, if any | 113 | # Find the recipe file within the workspace, if any |
| 114 | bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*') | 114 | bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*') |
| 115 | recipefile = glob.glob(os.path.join(config.workspace_path, | 115 | recipefile = glob.glob(os.path.join(context.config.workspace_path, |
| 116 | 'recipes', | 116 | 'recipes', |
| 117 | recipepn, | 117 | recipepn, |
| 118 | bbfile)) | 118 | bbfile)) |
| @@ -126,13 +126,15 @@ def read_workspace(): | |||
| 126 | if pnvalues: | 126 | if pnvalues: |
| 127 | if not pn: | 127 | if not pn: |
| 128 | raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. " | 128 | raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. " |
| 129 | "Maybe still using old syntax?" % config.workspace_path) | 129 | "Maybe still using old syntax?" % context.config.workspace_path) |
| 130 | if not pnvalues.get('srctreebase', None): | 130 | if not pnvalues.get('srctreebase', None): |
| 131 | pnvalues['srctreebase'] = pnvalues['srctree'] | 131 | pnvalues['srctreebase'] = pnvalues['srctree'] |
| 132 | logger.debug('Found recipe %s' % pnvalues) | 132 | logger.debug('Found recipe %s' % pnvalues) |
| 133 | workspace[pn] = pnvalues | 133 | workspace[pn] = pnvalues |
| 134 | 134 | ||
| 135 | def create_workspace(args, config, basepath, workspace): | 135 | return workspace |
| 136 | |||
| 137 | def create_workspace(args, config, basepath, _workspace): | ||
| 136 | if args.layerpath: | 138 | if args.layerpath: |
| 137 | workspacedir = os.path.abspath(args.layerpath) | 139 | workspacedir = os.path.abspath(args.layerpath) |
| 138 | else: | 140 | else: |
| @@ -140,12 +142,12 @@ def create_workspace(args, config, basepath, workspace): | |||
| 140 | layerseries = None | 142 | layerseries = None |
| 141 | if args.layerseries: | 143 | if args.layerseries: |
| 142 | layerseries = args.layerseries | 144 | layerseries = args.layerseries |
| 143 | _create_workspace(workspacedir, config, basepath, layerseries) | 145 | _create_workspace(workspacedir, basepath, layerseries) |
| 144 | if not args.create_only: | 146 | if not args.create_only: |
| 145 | _enable_workspace_layer(workspacedir, config, basepath) | 147 | _enable_workspace_layer(workspacedir, config, basepath) |
| 146 | 148 | ||
| 147 | def _create_workspace(workspacedir, config, basepath, layerseries=None): | 149 | def _create_workspace(workspacedir, basepath, layerseries=None): |
| 148 | import bb | 150 | import bb.utils |
| 149 | 151 | ||
| 150 | confdir = os.path.join(workspacedir, 'conf') | 152 | confdir = os.path.join(workspacedir, 'conf') |
| 151 | if os.path.exists(os.path.join(confdir, 'layer.conf')): | 153 | if os.path.exists(os.path.join(confdir, 'layer.conf')): |
| @@ -190,7 +192,7 @@ def _create_workspace(workspacedir, config, basepath, layerseries=None): | |||
| 190 | 192 | ||
| 191 | def _enable_workspace_layer(workspacedir, config, basepath): | 193 | def _enable_workspace_layer(workspacedir, config, basepath): |
| 192 | """Ensure the workspace layer is in bblayers.conf""" | 194 | """Ensure the workspace layer is in bblayers.conf""" |
| 193 | import bb | 195 | import bb.utils |
| 194 | bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf') | 196 | bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf') |
| 195 | if not os.path.exists(bblayers_conf): | 197 | if not os.path.exists(bblayers_conf): |
| 196 | logger.error('Unable to find bblayers.conf') | 198 | logger.error('Unable to find bblayers.conf') |
| @@ -209,15 +211,9 @@ def _enable_workspace_layer(workspacedir, config, basepath): | |||
| 209 | 211 | ||
| 210 | 212 | ||
| 211 | def main(): | 213 | def main(): |
| 212 | global basepath | ||
| 213 | global config | ||
| 214 | global context | ||
| 215 | |||
| 216 | if sys.getfilesystemencoding() != "utf-8": | 214 | if sys.getfilesystemencoding() != "utf-8": |
| 217 | sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") | 215 | sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") |
| 218 | 216 | ||
| 219 | context = Context(fixed_setup=False) | ||
| 220 | |||
| 221 | # Default basepath | 217 | # Default basepath |
| 222 | basepath = os.path.dirname(os.path.abspath(__file__)) | 218 | basepath = os.path.dirname(os.path.abspath(__file__)) |
| 223 | 219 | ||
| @@ -242,21 +238,23 @@ def main(): | |||
| 242 | elif global_args.quiet: | 238 | elif global_args.quiet: |
| 243 | logger.setLevel(logging.ERROR) | 239 | logger.setLevel(logging.ERROR) |
| 244 | 240 | ||
| 241 | is_fixed_setup = False | ||
| 242 | |||
| 245 | if global_args.basepath: | 243 | if global_args.basepath: |
| 246 | # Override | 244 | # Override |
| 247 | basepath = global_args.basepath | 245 | basepath = global_args.basepath |
| 248 | if os.path.exists(os.path.join(basepath, '.devtoolbase')): | 246 | if os.path.exists(os.path.join(basepath, '.devtoolbase')): |
| 249 | context.fixed_setup = True | 247 | is_fixed_setup = True |
| 250 | else: | 248 | else: |
| 251 | pth = basepath | 249 | pth = basepath |
| 252 | while pth != '' and pth != os.sep: | 250 | while pth != '' and pth != os.sep: |
| 253 | if os.path.exists(os.path.join(pth, '.devtoolbase')): | 251 | if os.path.exists(os.path.join(pth, '.devtoolbase')): |
| 254 | context.fixed_setup = True | 252 | is_fixed_setup = True |
| 255 | basepath = pth | 253 | basepath = pth |
| 256 | break | 254 | break |
| 257 | pth = os.path.dirname(pth) | 255 | pth = os.path.dirname(pth) |
| 258 | 256 | ||
| 259 | if not context.fixed_setup: | 257 | if not is_fixed_setup: |
| 260 | basepath = os.environ.get('BUILDDIR') | 258 | basepath = os.environ.get('BUILDDIR') |
| 261 | if not basepath: | 259 | if not basepath: |
| 262 | logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)") | 260 | logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)") |
| @@ -264,10 +262,9 @@ def main(): | |||
| 264 | 262 | ||
| 265 | logger.debug('Using basepath %s' % basepath) | 263 | logger.debug('Using basepath %s' % basepath) |
| 266 | 264 | ||
| 267 | config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf')) | 265 | config = ConfigHandler(basepath, os.path.join(basepath, 'conf', 'devtool.conf')) |
| 268 | if not config.read(): | 266 | if not config.read(): |
| 269 | return -1 | 267 | return -1 |
| 270 | context.config = config | ||
| 271 | 268 | ||
| 272 | bitbake_subdir = config.get('General', 'bitbake_subdir', '') | 269 | bitbake_subdir = config.get('General', 'bitbake_subdir', '') |
| 273 | if bitbake_subdir: | 270 | if bitbake_subdir: |
| @@ -289,6 +286,7 @@ def main(): | |||
| 289 | scriptutils.logger_setup_color(logger, global_args.color) | 286 | scriptutils.logger_setup_color(logger, global_args.color) |
| 290 | 287 | ||
| 291 | if global_args.bbpath is None: | 288 | if global_args.bbpath is None: |
| 289 | import bb | ||
| 292 | try: | 290 | try: |
| 293 | tinfoil = setup_tinfoil(config_only=True, basepath=basepath) | 291 | tinfoil = setup_tinfoil(config_only=True, basepath=basepath) |
| 294 | try: | 292 | try: |
| @@ -300,7 +298,10 @@ def main(): | |||
| 300 | 298 | ||
| 301 | # Search BBPATH first to allow layers to override plugins in scripts_path | 299 | # Search BBPATH first to allow layers to override plugins in scripts_path |
| 302 | pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]] | 300 | pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]] |
| 303 | context.pluginpaths = pluginpaths | 301 | |
| 302 | context = Context(fixed_setup=is_fixed_setup, config=config, pluginpaths=pluginpaths) | ||
| 303 | |||
| 304 | plugins = [] | ||
| 304 | for pluginpath in pluginpaths: | 305 | for pluginpath in pluginpaths: |
| 305 | scriptutils.load_plugins(logger, plugins, pluginpath) | 306 | scriptutils.load_plugins(logger, plugins, pluginpath) |
| 306 | 307 | ||
| @@ -331,9 +332,9 @@ def main(): | |||
| 331 | args = parser.parse_args(unparsed_args, namespace=global_args) | 332 | args = parser.parse_args(unparsed_args, namespace=global_args) |
| 332 | 333 | ||
| 333 | try: | 334 | try: |
| 335 | workspace = {} | ||
| 334 | if not getattr(args, 'no_workspace', False): | 336 | if not getattr(args, 'no_workspace', False): |
| 335 | read_workspace() | 337 | workspace = read_workspace(basepath, context) |
| 336 | |||
| 337 | ret = args.func(args, config, basepath, workspace) | 338 | ret = args.func(args, config, basepath, workspace) |
| 338 | except DevtoolError as err: | 339 | except DevtoolError as err: |
| 339 | if str(err): | 340 | if str(err): |
| @@ -341,6 +342,7 @@ def main(): | |||
| 341 | ret = err.exitcode | 342 | ret = err.exitcode |
| 342 | except argparse_oe.ArgumentUsageError as ae: | 343 | except argparse_oe.ArgumentUsageError as ae: |
| 343 | parser.error_subcommand(ae.message, ae.subcommand) | 344 | parser.error_subcommand(ae.message, ae.subcommand) |
| 345 | ret = 2 | ||
| 344 | 346 | ||
| 345 | return ret | 347 | return ret |
| 346 | 348 | ||
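The devtool refactor above removes the module-level basepath/workspace/config/context globals: state is now passed explicitly through main(), Context becomes a typed dataclass, and read_workspace() returns the workspace dict instead of mutating a global. A minimal sketch of the resulting pattern (names are illustrative, not the devtool code itself):

```python
import dataclasses
from typing import List  # needed while Python < 3.9 must be supported


@dataclasses.dataclass
class Context:
    fixed_setup: bool
    config: object          # a ConfigHandler instance in devtool itself
    pluginpaths: List[str]


def read_workspace(basepath, context):
    # State flows in through parameters and back out via the return value,
    # instead of writing to a module-level 'workspace' dict.
    workspace = {}
    # ... populate from context.config.workspace_path ...
    return workspace
```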
diff --git a/scripts/gen-site-config b/scripts/gen-site-config deleted file mode 100755 index 727b809c0f..0000000000 --- a/scripts/gen-site-config +++ /dev/null | |||
| @@ -1,43 +0,0 @@ | |||
| 1 | #! /bin/sh | ||
| 2 | # Copyright (c) 2005-2008 Wind River Systems, Inc. | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | cat << EOF | ||
| 8 | AC_PREREQ(2.57) | ||
| 9 | AC_INIT([site_wide],[1.0.0]) | ||
| 10 | |||
| 11 | EOF | ||
| 12 | |||
| 13 | # Disable as endian is set in the default config | ||
| 14 | #echo AC_C_BIGENDIAN | ||
| 15 | #echo | ||
| 16 | |||
| 17 | if [ -e $1/types ] ; then | ||
| 18 | while read type ; do | ||
| 19 | echo "AC_CHECK_SIZEOF([$type])" | ||
| 20 | done < $1/types | ||
| 21 | |||
| 22 | echo | ||
| 23 | fi | ||
| 24 | |||
| 25 | if [ -e $1/funcs ]; then | ||
| 26 | while read func ; do | ||
| 27 | echo "AC_CHECK_FUNCS([$func])" | ||
| 28 | done < $1/funcs | ||
| 29 | |||
| 30 | echo | ||
| 31 | fi | ||
| 32 | |||
| 33 | if [ -e $1/headers ]; then | ||
| 34 | while read header ; do | ||
| 35 | echo "AC_CHECK_HEADERS([$header])" | ||
| 36 | done < $1/headers | ||
| 37 | |||
| 38 | echo | ||
| 39 | fi | ||
| 40 | |||
| 41 | cat << EOF | ||
| 42 | AC_OUTPUT | ||
| 43 | EOF | ||
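The deleted gen-site-config helper read word lists from $1/types, $1/funcs and $1/headers and printed a configure.ac fragment on stdout. A rough Python equivalent of its output logic, for reference (the function name is hypothetical):

```python
def gen_site_config(types=(), funcs=(), headers=()):
    # One autoconf check per word, mirroring the shell script's read loops.
    out = ["AC_PREREQ(2.57)", "AC_INIT([site_wide],[1.0.0])", ""]
    out += ["AC_CHECK_SIZEOF([%s])" % t for t in types]
    out += ["AC_CHECK_FUNCS([%s])" % f for f in funcs]
    out += ["AC_CHECK_HEADERS([%s])" % h for h in headers]
    out.append("AC_OUTPUT")
    return "\n".join(out)


print(gen_site_config(types=["int", "long"], headers=["stdlib.h"]))
```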
diff --git a/scripts/install-buildtools b/scripts/install-buildtools index 2218f3ffac..93f3e2d678 100755 --- a/scripts/install-buildtools +++ b/scripts/install-buildtools | |||
| @@ -56,9 +56,9 @@ PROGNAME = 'install-buildtools' | |||
| 56 | logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) | 56 | logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) |
| 57 | 57 | ||
| 58 | DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') | 58 | DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') |
| 59 | DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto' | 59 | DEFAULT_BASE_URL = 'https://downloads.yoctoproject.org/releases/yocto' |
| 60 | DEFAULT_RELEASE = 'yocto-4.1' | 60 | DEFAULT_RELEASE = 'yocto-5.2.3' |
| 61 | DEFAULT_INSTALLER_VERSION = '4.1' | 61 | DEFAULT_INSTALLER_VERSION = '5.2.3' |
| 62 | DEFAULT_BUILDDATE = '202110XX' | 62 | DEFAULT_BUILDDATE = '202110XX' |
| 63 | 63 | ||
| 64 | # Python version sanity check | 64 | # Python version sanity check |
| @@ -102,6 +102,16 @@ def sha256_file(filename): | |||
| 102 | import hashlib | 102 | import hashlib |
| 103 | return _hasher(hashlib.sha256(), filename) | 103 | return _hasher(hashlib.sha256(), filename) |
| 104 | 104 | ||
| 105 | def remove_quotes(var): | ||
| 106 | """ | ||
| 107 | If a variable starts and ends with double quotes, remove them. | ||
| 108 | Assumption: if a variable starts with double quotes, it must also | ||
| 109 | end with them. | ||
| 110 | """ | ||
| 111 | if var[0] == '"': | ||
| 112 | var = var[1:-1] | ||
| 113 | return var | ||
| 114 | |||
| 105 | 115 | ||
| 106 | def main(): | 116 | def main(): |
| 107 | global DEFAULT_INSTALL_DIR | 117 | global DEFAULT_INSTALL_DIR |
| @@ -117,7 +127,8 @@ def main(): | |||
| 117 | 127 | ||
| 118 | parser = argparse.ArgumentParser( | 128 | parser = argparse.ArgumentParser( |
| 119 | description="Buildtools installation helper", | 129 | description="Buildtools installation helper", |
| 120 | add_help=False) | 130 | add_help=False, |
| 131 | formatter_class=argparse.RawTextHelpFormatter) | ||
| 121 | parser.add_argument('-u', '--url', | 132 | parser.add_argument('-u', '--url', |
| 122 | help='URL from where to fetch buildtools SDK installer, not ' | 133 | help='URL from where to fetch buildtools SDK installer, not ' |
| 123 | 'including filename (optional)\n' | 134 | 'including filename (optional)\n' |
| @@ -131,6 +142,9 @@ def main(): | |||
| 131 | default=DEFAULT_INSTALL_DIR, | 142 | default=DEFAULT_INSTALL_DIR, |
| 132 | help='directory where buildtools SDK will be installed (optional)', | 143 | help='directory where buildtools SDK will be installed (optional)', |
| 133 | action='store') | 144 | action='store') |
| 145 | parser.add_argument('--downloads-directory', | ||
| 146 | help='use this directory for tarball/checksum downloads and do not erase them (default is a temporary directory which is deleted after unpacking and installing the buildtools)', | ||
| 147 | action='store') | ||
| 134 | parser.add_argument('-r', '--release', | 148 | parser.add_argument('-r', '--release', |
| 135 | default=DEFAULT_RELEASE, | 149 | default=DEFAULT_RELEASE, |
| 136 | help='Yocto Project release string for SDK which will be ' | 150 | help='Yocto Project release string for SDK which will be ' |
| @@ -224,11 +238,14 @@ def main(): | |||
| 224 | safe_filename = quote(filename) | 238 | safe_filename = quote(filename) |
| 225 | buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename) | 239 | buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename) |
| 226 | 240 | ||
| 227 | tmpsdk_dir = tempfile.mkdtemp() | 241 | sdk_dir = args.downloads_directory or tempfile.mkdtemp() |
| 242 | os.makedirs(sdk_dir, exist_ok=True) | ||
| 228 | try: | 243 | try: |
| 229 | # Fetch installer | 244 | # Fetch installer |
| 230 | logger.info("Fetching buildtools installer") | 245 | logger.info("Fetching buildtools installer") |
| 231 | tmpbuildtools = os.path.join(tmpsdk_dir, filename) | 246 | tmpbuildtools = os.path.join(sdk_dir, filename) |
| 247 | with open(os.path.join(sdk_dir, 'buildtools_url'), 'w') as f: | ||
| 248 | f.write(buildtools_url) | ||
| 232 | ret = subprocess.call("wget -q -O %s %s" % | 249 | ret = subprocess.call("wget -q -O %s %s" % |
| 233 | (tmpbuildtools, buildtools_url), shell=True) | 250 | (tmpbuildtools, buildtools_url), shell=True) |
| 234 | if ret != 0: | 251 | if ret != 0: |
| @@ -238,19 +255,17 @@ def main(): | |||
| 238 | # Verify checksum | 255 | # Verify checksum |
| 239 | if args.check: | 256 | if args.check: |
| 240 | logger.info("Fetching buildtools installer checksum") | 257 | logger.info("Fetching buildtools installer checksum") |
| 241 | checksum_type = "" | 258 | checksum_type = "sha256sum" |
| 242 | for checksum_type in ["md5sum", "sha256sum"]: | 259 | checksum_url = "{}.{}".format(buildtools_url, checksum_type) |
| 243 | check_url = "{}.{}".format(buildtools_url, checksum_type) | 260 | checksum_filename = "{}.{}".format(filename, checksum_type) |
| 244 | checksum_filename = "{}.{}".format(filename, checksum_type) | 261 | tmpbuildtools_checksum = os.path.join(sdk_dir, checksum_filename) |
| 245 | tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename) | 262 | with open(os.path.join(sdk_dir, 'checksum_url'), 'w') as f: |
| 246 | ret = subprocess.call("wget -q -O %s %s" % | 263 | f.write(checksum_url) |
| 247 | (tmpbuildtools_checksum, check_url), shell=True) | 264 | ret = subprocess.call("wget -q -O %s %s" % |
| 248 | if ret == 0: | 265 | (tmpbuildtools_checksum, checksum_url), shell=True) |
| 249 | break | 266 | if ret != 0: |
| 250 | else: | 267 | logger.error("Could not download file from %s" % checksum_url) |
| 251 | if ret != 0: | 268 | return ret |
| 252 | logger.error("Could not download file from %s" % check_url) | ||
| 253 | return ret | ||
| 254 | regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$") | 269 | regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$") |
| 255 | with open(tmpbuildtools_checksum, 'rb') as f: | 270 | with open(tmpbuildtools_checksum, 'rb') as f: |
| 256 | original = f.read() | 271 | original = f.read() |
| @@ -263,10 +278,7 @@ def main(): | |||
| 263 | logger.error("Filename does not match name in checksum") | 278 | logger.error("Filename does not match name in checksum") |
| 264 | return 1 | 279 | return 1 |
| 265 | checksum = m.group('checksum') | 280 | checksum = m.group('checksum') |
| 266 | if checksum_type == "md5sum": | 281 | checksum_value = sha256_file(tmpbuildtools) |
| 267 | checksum_value = md5_file(tmpbuildtools) | ||
| 268 | else: | ||
| 269 | checksum_value = sha256_file(tmpbuildtools) | ||
| 270 | if checksum == checksum_value: | 282 | if checksum == checksum_value: |
| 271 | logger.info("Checksum success") | 283 | logger.info("Checksum success") |
| 272 | else: | 284 | else: |
| @@ -280,7 +292,7 @@ def main(): | |||
| 280 | os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC) | 292 | os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC) |
| 281 | logger.debug(os.stat(tmpbuildtools)) | 293 | logger.debug(os.stat(tmpbuildtools)) |
| 282 | if args.directory: | 294 | if args.directory: |
| 283 | install_dir = args.directory | 295 | install_dir = os.path.abspath(args.directory) |
| 284 | ret = subprocess.call("%s -d %s -y" % | 296 | ret = subprocess.call("%s -d %s -y" % |
| 285 | (tmpbuildtools, install_dir), shell=True) | 297 | (tmpbuildtools, install_dir), shell=True) |
| 286 | else: | 298 | else: |
| @@ -301,7 +313,7 @@ def main(): | |||
| 301 | if match: | 313 | if match: |
| 302 | env_var = match.group('env_var') | 314 | env_var = match.group('env_var') |
| 303 | logger.debug("env_var: %s" % env_var) | 315 | logger.debug("env_var: %s" % env_var) |
| 304 | env_val = match.group('env_val') | 316 | env_val = remove_quotes(match.group('env_val')) |
| 305 | logger.debug("env_val: %s" % env_val) | 317 | logger.debug("env_val: %s" % env_val) |
| 306 | os.environ[env_var] = env_val | 318 | os.environ[env_var] = env_val |
| 307 | 319 | ||
| @@ -343,7 +355,8 @@ def main(): | |||
| 343 | 355 | ||
| 344 | finally: | 356 | finally: |
| 345 | # cleanup tmp directory | 357 | # cleanup tmp directory |
| 346 | shutil.rmtree(tmpsdk_dir) | 358 | if not args.downloads_directory: |
| 359 | shutil.rmtree(sdk_dir) | ||
| 347 | 360 | ||
| 348 | 361 | ||
| 349 | if __name__ == '__main__': | 362 | if __name__ == '__main__': |
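install-buildtools now fetches only the .sha256sum sidecar (the md5sum fallback loop is gone), records the download URLs next to the artifacts, and can keep downloads in a user-supplied --downloads-directory instead of a throwaway tempdir. A minimal sketch of the simplified verification step, reusing the script's checksum regex (helper names are illustrative):

```python
import hashlib
import re


def sha256_file(filename):
    h = hashlib.sha256()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(65536), b""):
            h.update(block)
    return h.hexdigest()


def verify(tarball, checksum_file):
    # The sidecar holds "<hex>  [path/]filename", split with the same regex
    # the script uses.
    with open(checksum_file) as f:
        line = f.read()
    m = re.match(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$", line)
    return bool(m) and m.group("checksum") == sha256_file(tarball)
```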
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html index ad4a93ed02..86435273cf 100644 --- a/scripts/lib/build_perf/html/measurement_chart.html +++ b/scripts/lib/build_perf/html/measurement_chart.html | |||
| @@ -2,7 +2,7 @@ | |||
| 2 | // Get raw data | 2 | // Get raw data |
| 3 | const rawData = [ | 3 | const rawData = [ |
| 4 | {% for sample in measurement.samples %} | 4 | {% for sample in measurement.samples %} |
| 5 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}], | 5 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'], |
| 6 | {% endfor %} | 6 | {% endfor %} |
| 7 | ]; | 7 | ]; |
| 8 | 8 | ||
| @@ -26,27 +26,37 @@ | |||
| 26 | ] | 26 | ] |
| 27 | }); | 27 | }); |
| 28 | 28 | ||
| 29 | const commitCountList = rawData.map(([commit, value, time]) => { | ||
| 30 | return commit | ||
| 31 | }); | ||
| 32 | |||
| 33 | const commitCountData = rawData.map(([commit, value, time]) => { | ||
| 34 | return updateValue(value) | ||
| 35 | }); | ||
| 36 | |||
| 29 | // Set chart options | 37 | // Set chart options |
| 30 | const option = { | 38 | const option_start_time = { |
| 31 | tooltip: { | 39 | tooltip: { |
| 32 | trigger: 'axis', | 40 | trigger: 'axis', |
| 33 | valueFormatter: (value) => { | 41 | enterable: true, |
| 34 | const commitNumber = rawData.filter(([commit, dataValue, time]) => updateValue(dataValue) === value) | 42 | position: function (point, params, dom, rect, size) { |
| 43 | return [point[0], '0%']; | ||
| 44 | }, | ||
| 45 | formatter: function (param) { | ||
| 46 | const value = param[0].value[1] | ||
| 47 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
| 48 | const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)'); | ||
| 49 | |||
| 50 | // Add commit hash to the tooltip as a link | ||
| 51 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
| 35 | if ('{{ measurement.value_type.quantity }}' == 'time') { | 52 | if ('{{ measurement.value_type.quantity }}' == 'time') { |
| 36 | const hours = Math.floor(value/60) | 53 | const hours = Math.floor(value/60) |
| 37 | const minutes = Math.floor(value % 60) | 54 | const minutes = Math.floor(value % 60) |
| 38 | const seconds = Math.floor((value * 60) % 60) | 55 | const seconds = Math.floor((value * 60) % 60) |
| 39 | return [ | 56 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` |
| 40 | hours + ':' + minutes + ':' + seconds + ', ' + | ||
| 41 | 'commit number: ' + commitNumber[0][0] | ||
| 42 | ] | ||
| 43 | } | 57 | } |
| 44 | return [ | 58 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` |
| 45 | value.toFixed(2) + ' MB' + ', ' + | 59 | ;} |
| 46 | 'commit number: ' + commitNumber[0][0] | ||
| 47 | ] | ||
| 48 | }, | ||
| 49 | |||
| 50 | }, | 60 | }, |
| 51 | xAxis: { | 61 | xAxis: { |
| 52 | type: 'time', | 62 | type: 'time', |
| @@ -72,29 +82,87 @@ | |||
| 72 | { | 82 | { |
| 73 | name: '{{ measurement.value_type.quantity }}', | 83 | name: '{{ measurement.value_type.quantity }}', |
| 74 | type: 'line', | 84 | type: 'line', |
| 75 | step: 'start', | ||
| 76 | symbol: 'none', | 85 | symbol: 'none', |
| 77 | data: data | 86 | data: data |
| 78 | } | 87 | } |
| 79 | ] | 88 | ] |
| 80 | }; | 89 | }; |
| 81 | 90 | ||
| 91 | const option_commit_count = { | ||
| 92 | tooltip: { | ||
| 93 | trigger: 'axis', | ||
| 94 | enterable: true, | ||
| 95 | position: function (point, params, dom, rect, size) { | ||
| 96 | return [point[0], '0%']; | ||
| 97 | }, | ||
| 98 | formatter: function (param) { | ||
| 99 | const value = param[0].value | ||
| 100 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
| 101 | const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)'); | ||
| 102 | // Add commit hash to the tooltip as a link | ||
| 103 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
| 104 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
| 105 | const hours = Math.floor(value/60) | ||
| 106 | const minutes = Math.floor(value % 60) | ||
| 107 | const seconds = Math.floor((value * 60) % 60) | ||
| 108 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
| 109 | } | ||
| 110 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
| 111 | ;} | ||
| 112 | }, | ||
| 113 | xAxis: { | ||
| 114 | name: 'Commit count', | ||
| 115 | type: 'category', | ||
| 116 | data: commitCountList | ||
| 117 | }, | ||
| 118 | yAxis: { | ||
| 119 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
| 120 | type: 'value', | ||
| 121 | min: function(value) { | ||
| 122 | return Math.round(value.min - 0.5); | ||
| 123 | }, | ||
| 124 | max: function(value) { | ||
| 125 | return Math.round(value.max + 0.5); | ||
| 126 | } | ||
| 127 | }, | ||
| 128 | dataZoom: [ | ||
| 129 | { | ||
| 130 | type: 'slider', | ||
| 131 | xAxisIndex: 0, | ||
| 132 | filterMode: 'none' | ||
| 133 | }, | ||
| 134 | ], | ||
| 135 | series: [ | ||
| 136 | { | ||
| 137 | name: '{{ measurement.value_type.quantity }}', | ||
| 138 | type: 'line', | ||
| 139 | symbol: 'none', | ||
| 140 | data: commitCountData | ||
| 141 | } | ||
| 142 | ] | ||
| 143 | }; | ||
| 144 | |||
| 82 | // Draw chart | 145 | // Draw chart |
| 83 | const chart_div = document.getElementById('{{ chart_elem_id }}'); | 146 | const draw_chart = (chart_id, option) => { |
| 84 | // Set dark mode | 147 | let chart_name |
| 85 | let measurement_chart | 148 | const chart_div = document.getElementById(chart_id); |
| 86 | if (window.matchMedia('(prefers-color-scheme: dark)').matches) { | 149 | // Set dark mode |
| 87 | measurement_chart= echarts.init(chart_div, 'dark', { | 150 | if (window.matchMedia('(prefers-color-scheme: dark)').matches) { |
| 88 | height: 320 | 151 | chart_name= echarts.init(chart_div, 'dark', { |
| 89 | }); | 152 | height: 320 |
| 90 | } else { | 153 | }); |
| 91 | measurement_chart= echarts.init(chart_div, null, { | 154 | } else { |
| 92 | height: 320 | 155 | chart_name= echarts.init(chart_div, null, { |
| 156 | height: 320 | ||
| 157 | }); | ||
| 158 | } | ||
| 159 | // Change chart size with browser resize | ||
| 160 | window.addEventListener('resize', function() { | ||
| 161 | chart_name.resize(); | ||
| 93 | }); | 162 | }); |
| 163 | return chart_name.setOption(option); | ||
| 94 | } | 164 | } |
| 95 | // Change chart size with browser resize | 165 | |
| 96 | window.addEventListener('resize', function() { | 166 | draw_chart('{{ chart_elem_start_time_id }}', option_start_time) |
| 97 | measurement_chart.resize(); | 167 | draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count) |
| 98 | }); | ||
| 99 | measurement_chart.setOption(option); | ||
| 100 | </script> | 168 | </script> |
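In both tooltip formatters the chart value for 'time' quantities is expressed in minutes, so the template splits it into hours, whole minutes and seconds. A Python mirror of that arithmetic, for clarity (the function name is illustrative):

```python
def format_duration(value_minutes):
    # Same math as the template's tooltip: the value is in minutes.
    hours = int(value_minutes // 60)
    minutes = int(value_minutes % 60)
    seconds = int((value_minutes * 60) % 60)
    return f"{hours}:{minutes}:{seconds}"


assert format_duration(90.5) == "1:30:30"
```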
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html index 537ed3ee52..28cd80e738 100644 --- a/scripts/lib/build_perf/html/report.html +++ b/scripts/lib/build_perf/html/report.html | |||
| @@ -9,7 +9,8 @@ | |||
| 9 | {% for test in test_data %} | 9 | {% for test in test_data %} |
| 10 | {% if test.status == 'SUCCESS' %} | 10 | {% if test.status == 'SUCCESS' %} |
| 11 | {% for measurement in test.measurements %} | 11 | {% for measurement in test.measurements %} |
| 12 | {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %} | 12 | {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %} |
| 13 | {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %} | ||
| 13 | {% include 'measurement_chart.html' %} | 14 | {% include 'measurement_chart.html' %} |
| 14 | {% endfor %} | 15 | {% endfor %} |
| 15 | {% endif %} | 16 | {% endif %} |
| @@ -116,6 +117,22 @@ a { | |||
| 116 | a:hover { | 117 | a:hover { |
| 117 | color: #8080ff; | 118 | color: #8080ff; |
| 118 | } | 119 | } |
| 120 | button { | ||
| 121 | background-color: #F3F4F6; | ||
| 122 | border: none; | ||
| 123 | outline: none; | ||
| 124 | cursor: pointer; | ||
| 125 | padding: 10px 12px; | ||
| 126 | transition: 0.3s; | ||
| 127 | border-radius: 8px; | ||
| 128 | color: #3A4353; | ||
| 129 | } | ||
| 130 | button:hover { | ||
| 131 | background-color: #d6d9e0; | ||
| 132 | } | ||
| 133 | .tab button.active { | ||
| 134 | background-color: #d6d9e0; | ||
| 135 | } | ||
| 119 | @media (prefers-color-scheme: dark) { | 136 | @media (prefers-color-scheme: dark) { |
| 120 | :root { | 137 | :root { |
| 121 | --text: #e9e8fa; | 138 | --text: #e9e8fa; |
| @@ -126,6 +143,16 @@ a:hover { | |||
| 126 | --trborder: #212936; | 143 | --trborder: #212936; |
| 127 | --chartborder: #b1b0bf; | 144 | --chartborder: #b1b0bf; |
| 128 | } | 145 | } |
| 146 | button { | ||
| 147 | background-color: #28303E; | ||
| 148 | color: #fff; | ||
| 149 | } | ||
| 150 | button:hover { | ||
| 151 | background-color: #545a69; | ||
| 152 | } | ||
| 153 | .tab button.active { | ||
| 154 | background-color: #545a69; | ||
| 155 | } | ||
| 129 | } | 156 | } |
| 130 | </style> | 157 | </style> |
| 131 | 158 | ||
| @@ -233,7 +260,18 @@ a:hover { | |||
| 233 | <tr> | 260 | <tr> |
| 234 | <td style="width: 75%"> | 261 | <td style="width: 75%"> |
| 235 | {# Linechart #} | 262 | {# Linechart #} |
| 236 | <div id="{{ test.name }}_{{ measurement.name }}_chart"></div> | 263 | <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks"> |
| 264 | <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button> | ||
| 265 | <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button> | ||
| 266 | </div> | ||
| 267 | <div class="{{ test.name }}_{{ measurement.name }}_tabcontent"> | ||
| 268 | <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;"> | ||
| 269 | <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div> | ||
| 270 | </div> | ||
| 271 | <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;"> | ||
| 272 | <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div> | ||
| 273 | </div> | ||
| 274 | </div> | ||
| 237 | </td> | 275 | </td> |
| 238 | <td> | 276 | <td> |
| 239 | {# Measurement statistics #} | 277 | {# Measurement statistics #} |
| @@ -340,6 +378,31 @@ a:hover { | |||
| 340 | <div class="preformatted">{{ test.message }}</div> | 378 | <div class="preformatted">{{ test.message }}</div> |
| 341 | {% endif %} | 379 | {% endif %} |
| 342 | {% endfor %} | 380 | {% endfor %} |
| 343 | </div></body> | 381 | </div> |
| 344 | </html> | ||
| 345 | 382 | ||
| 383 | <script> | ||
| 384 | function openChart(event, chartType, chartName) { | ||
| 385 | let i, tabcontents, tablinks | ||
| 386 | tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`); | ||
| 387 | tabcontents.forEach((tabcontent) => { | ||
| 388 | tabcontent.style.display = "none"; | ||
| 389 | }); | ||
| 390 | |||
| 391 | tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`); | ||
| 392 | tablinks.forEach((tabLink) => { | ||
| 393 | tabLink.classList.remove('active'); | ||
| 394 | }); | ||
| 395 | |||
| 396 | const targetTab = document.getElementById(chartType) | ||
| 397 | targetTab.style.display = "block"; | ||
| 398 | |||
| 399 | // Call resize on the ECharts instance to redraw the chart | ||
| 400 | const chartContainer = targetTab.querySelector('div') | ||
| 401 | echarts.init(chartContainer).resize(); | ||
| 402 | |||
| 403 | event.currentTarget.classList.add('active'); | ||
| 404 | } | ||
| 405 | </script> | ||
| 406 | |||
| 407 | </body> | ||
| 408 | </html> | ||
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py index 62ecdfe390..86aadf39a6 100644 --- a/scripts/lib/checklayer/__init__.py +++ b/scripts/lib/checklayer/__init__.py | |||
| @@ -452,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs): | |||
| 452 | msg.extend([' ' + line for line in output.splitlines()]) | 452 | msg.extend([' ' + line for line in output.splitlines()]) |
| 453 | msg.append('') | 453 | msg.append('') |
| 454 | return '\n'.join(msg) | 454 | return '\n'.join(msg) |
| 455 | |||
| 456 | |||
| 457 | def get_git_toplevel(directory): | ||
| 458 | """ | ||
| 459 | Try and find the top of the git repository that directory might be in. | ||
| 460 | Returns the top-level directory, or None. | ||
| 461 | """ | ||
| 462 | cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"] | ||
| 463 | try: | ||
| 464 | return subprocess.check_output(cmd, text=True).strip() | ||
| 465 | except: | ||
| 466 | return None | ||
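get_git_toplevel() returns the repository root as a string, or None whenever `git rev-parse --show-toplevel` fails; the bare except also covers git being absent from the host. A usage sketch (the layer path is hypothetical):

```python
top = get_git_toplevel("/srv/layers/meta-example")
if top is None:
    # Not inside a git checkout: callers treat this as "no enclosing
    # repository" rather than as an error.
    print("no git top-level found")
```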
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py index 97b16f78c8..ddead69a7b 100644 --- a/scripts/lib/checklayer/cases/common.py +++ b/scripts/lib/checklayer/cases/common.py | |||
| @@ -7,7 +7,7 @@ import glob | |||
| 7 | import os | 7 | import os |
| 8 | import unittest | 8 | import unittest |
| 9 | import re | 9 | import re |
| 10 | from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures | 10 | from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel |
| 11 | from checklayer.case import OECheckLayerTestCase | 11 | from checklayer.case import OECheckLayerTestCase |
| 12 | 12 | ||
| 13 | class CommonCheckLayer(OECheckLayerTestCase): | 13 | class CommonCheckLayer(OECheckLayerTestCase): |
| @@ -40,6 +40,38 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
| 40 | email_regex = re.compile(r"[^@]+@[^@]+") | 40 | email_regex = re.compile(r"[^@]+@[^@]+") |
| 41 | self.assertTrue(email_regex.match(data)) | 41 | self.assertTrue(email_regex.match(data)) |
| 42 | 42 | ||
| 43 | def find_file_by_name(self, globs): | ||
| 44 | """ | ||
| 45 | Utility function to find a file that matches the specified list of | ||
| 46 | globs, in either the layer directory itself or the repository top-level | ||
| 47 | directory. | ||
| 48 | """ | ||
| 49 | directories = [self.tc.layer["path"]] | ||
| 50 | toplevel = get_git_toplevel(directories[0]) | ||
| 51 | if toplevel: | ||
| 52 | directories.append(toplevel) | ||
| 53 | |||
| 54 | for path in directories: | ||
| 55 | for name in globs: | ||
| 56 | files = glob.glob(os.path.join(path, name)) | ||
| 57 | if files: | ||
| 58 | return sorted(files)[0] | ||
| 59 | return None | ||
| 60 | |||
| 61 | def test_security(self): | ||
| 62 | """ | ||
| 63 | Test that the layer has a SECURITY.md (or similar) file, either in the | ||
| 64 | layer itself or at the top of the containing git repository. | ||
| 65 | """ | ||
| 66 | if self.tc.layer["type"] == LayerType.CORE: | ||
| 67 | raise unittest.SkipTest("Core layer's SECURITY is top level") | ||
| 68 | |||
| 69 | filename = self.find_file_by_name(("SECURITY", "SECURITY.*")) | ||
| 70 | self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.") | ||
| 71 | |||
| 72 | size = os.path.getsize(filename) | ||
| 73 | self.assertGreater(size, 0, msg=f"{filename} has no content.") | ||
| 74 | |||
| 43 | def test_parse(self): | 75 | def test_parse(self): |
| 44 | check_command('Layer %s failed to parse.' % self.tc.layer['name'], | 76 | check_command('Layer %s failed to parse.' % self.tc.layer['name'], |
| 45 | 'bitbake -p') | 77 | 'bitbake -p') |
| @@ -72,7 +104,6 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
| 72 | self.tc.layer['name']) | 104 | self.tc.layer['name']) |
| 73 | self.fail('\n'.join(msg)) | 105 | self.fail('\n'.join(msg)) |
| 74 | 106 | ||
| 75 | @unittest.expectedFailure | ||
| 76 | def test_patches_upstream_status(self): | 107 | def test_patches_upstream_status(self): |
| 77 | import sys | 108 | import sys |
| 78 | sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/')) | 109 | sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/')) |
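find_file_by_name() searches the layer directory first and only then the enclosing git top-level, returning the alphabetically first match per directory. A standalone sketch of that search order (names are illustrative):

```python
import glob
import os


def find_first(directories, patterns):
    for path in directories:
        for name in patterns:
            files = glob.glob(os.path.join(path, name))
            if files:
                return sorted(files)[0]  # deterministic pick when several match
    return None


# e.g. find_first(["/srv/layers/meta-example"], ("SECURITY", "SECURITY.*"))
```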
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py index 6133c1c5b4..969d6dc13a 100644 --- a/scripts/lib/devtool/__init__.py +++ b/scripts/lib/devtool/__init__.py | |||
| @@ -26,7 +26,7 @@ class DevtoolError(Exception): | |||
| 26 | 26 | ||
| 27 | def exec_build_env_command(init_path, builddir, cmd, watch=False, **options): | 27 | def exec_build_env_command(init_path, builddir, cmd, watch=False, **options): |
| 28 | """Run a program in bitbake build context""" | 28 | """Run a program in bitbake build context""" |
| 29 | import bb | 29 | import bb.process |
| 30 | if not 'cwd' in options: | 30 | if not 'cwd' in options: |
| 31 | options["cwd"] = builddir | 31 | options["cwd"] = builddir |
| 32 | if init_path: | 32 | if init_path: |
| @@ -50,7 +50,7 @@ def exec_build_env_command(init_path, builddir, cmd, watch=False, **options): | |||
| 50 | 50 | ||
| 51 | def exec_watch(cmd, **options): | 51 | def exec_watch(cmd, **options): |
| 52 | """Run program with stdout shown on sys.stdout""" | 52 | """Run program with stdout shown on sys.stdout""" |
| 53 | import bb | 53 | import bb.process |
| 54 | if isinstance(cmd, str) and not "shell" in options: | 54 | if isinstance(cmd, str) and not "shell" in options: |
| 55 | options["shell"] = True | 55 | options["shell"] = True |
| 56 | 56 | ||
| @@ -66,7 +66,7 @@ def exec_watch(cmd, **options): | |||
| 66 | sys.stdout.write(out) | 66 | sys.stdout.write(out) |
| 67 | sys.stdout.flush() | 67 | sys.stdout.flush() |
| 68 | buf += out | 68 | buf += out |
| 69 | elif out == '' and process.poll() != None: | 69 | elif out == '' and process.poll() is not None: |
| 70 | break | 70 | break |
| 71 | 71 | ||
| 72 | if process.returncode != 0: | 72 | if process.returncode != 0: |
| @@ -74,13 +74,6 @@ def exec_watch(cmd, **options): | |||
| 74 | 74 | ||
| 75 | return buf, None | 75 | return buf, None |
| 76 | 76 | ||
| 77 | def exec_fakeroot(d, cmd, **kwargs): | ||
| 78 | """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" | ||
| 79 | # Grab the command and check it actually exists | ||
| 80 | fakerootcmd = d.getVar('FAKEROOTCMD') | ||
| 81 | fakerootenv = d.getVar('FAKEROOTENV') | ||
| 82 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, kwargs) | ||
| 83 | |||
| 84 | def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs): | 77 | def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs): |
| 85 | if not os.path.exists(fakerootcmd): | 78 | if not os.path.exists(fakerootcmd): |
| 86 | logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') | 79 | logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') |
| @@ -122,6 +115,7 @@ def setup_tinfoil(config_only=False, basepath=None, tracking=False): | |||
| 122 | 115 | ||
| 123 | def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True): | 116 | def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True): |
| 124 | """Parse the specified recipe""" | 117 | """Parse the specified recipe""" |
| 118 | import bb.providers | ||
| 125 | try: | 119 | try: |
| 126 | recipefile = tinfoil.get_recipe_file(pn) | 120 | recipefile = tinfoil.get_recipe_file(pn) |
| 127 | except bb.providers.NoProvider as e: | 121 | except bb.providers.NoProvider as e: |
| @@ -147,6 +141,8 @@ def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False): | |||
| 147 | Check that a recipe is in the workspace and (optionally) that source | 141 | Check that a recipe is in the workspace and (optionally) that source |
| 148 | is present. | 142 | is present. |
| 149 | """ | 143 | """ |
| 144 | import bb.runqueue | ||
| 145 | _, pn = bb.runqueue.split_mc(pn) | ||
| 150 | 146 | ||
| 151 | workspacepn = pn | 147 | workspacepn = pn |
| 152 | 148 | ||
| @@ -176,6 +172,7 @@ def use_external_build(same_dir, no_same_dir, d): | |||
| 176 | """ | 172 | """ |
| 177 | Determine if we should use B!=S (separate build and source directories) or not | 173 | Determine if we should use B!=S (separate build and source directories) or not |
| 178 | """ | 174 | """ |
| 175 | import bb.data | ||
| 179 | b_is_s = True | 176 | b_is_s = True |
| 180 | if no_same_dir: | 177 | if no_same_dir: |
| 181 | logger.info('Using separate build directory since --no-same-dir specified') | 178 | logger.info('Using separate build directory since --no-same-dir specified') |
| @@ -234,7 +231,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None): | |||
| 234 | f.write(line) | 231 | f.write(line) |
| 235 | 232 | ||
| 236 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) | 233 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) |
| 237 | bb.process.run('git tag -f %s' % basetag, cwd=repodir) | 234 | bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir) |
| 238 | 235 | ||
| 239 | # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, | 236 | # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, |
| 240 | # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe | 237 | # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe |
| @@ -256,7 +253,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None): | |||
| 256 | oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) | 253 | oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) |
| 257 | found = False | 254 | found = False |
| 258 | if os.path.exists(os.path.join(repodir, '.gitmodules')): | 255 | if os.path.exists(os.path.join(repodir, '.gitmodules')): |
| 259 | bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir) | 256 | bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir) |
| 260 | 257 | ||
| 261 | def recipe_to_append(recipefile, config, wildcard=False): | 258 | def recipe_to_append(recipefile, config, wildcard=False): |
| 262 | """ | 259 | """ |
| @@ -298,31 +295,18 @@ def get_bbclassextend_targets(recipefile, pn): | |||
| 298 | 295 | ||
| 299 | def replace_from_file(path, old, new): | 296 | def replace_from_file(path, old, new): |
| 300 | """Replace strings on a file""" | 297 | """Replace strings on a file""" |
| 301 | |||
| 302 | def read_file(path): | ||
| 303 | data = None | ||
| 304 | with open(path) as f: | ||
| 305 | data = f.read() | ||
| 306 | return data | ||
| 307 | |||
| 308 | def write_file(path, data): | ||
| 309 | if data is None: | ||
| 310 | return | ||
| 311 | wdata = data.rstrip() + "\n" | ||
| 312 | with open(path, "w") as f: | ||
| 313 | f.write(wdata) | ||
| 314 | |||
| 315 | # In case old is None, return immediately | ||
| 316 | if old is None: | 298 | if old is None: |
| 317 | return | 299 | return |
| 300 | |||
| 318 | try: | 301 | try: |
| 319 | rdata = read_file(path) | 302 | with open(path) as f: |
| 303 | rdata = f.read() | ||
| 320 | except IOError as e: | 304 | except IOError as e: |
| 305 | import errno | ||
| 321 | # if file does not exist, just quit, otherwise raise an exception | 306 | # if file does not exist, just quit, otherwise raise an exception |
| 322 | if e.errno == errno.ENOENT: | 307 | if e.errno == errno.ENOENT: |
| 323 | return | 308 | return |
| 324 | else: | 309 | raise |
| 325 | raise | ||
| 326 | 310 | ||
| 327 | old_contents = rdata.splitlines() | 311 | old_contents = rdata.splitlines() |
| 328 | new_contents = [] | 312 | new_contents = [] |
| @@ -331,12 +315,16 @@ def replace_from_file(path, old, new): | |||
| 331 | new_contents.append(old_content.replace(old, new)) | 315 | new_contents.append(old_content.replace(old, new)) |
| 332 | except ValueError: | 316 | except ValueError: |
| 333 | pass | 317 | pass |
| 334 | write_file(path, "\n".join(new_contents)) | 318 | |
| 319 | wdata = ("\n".join(new_contents)).rstrip() + "\n" | ||
| 320 | with open(path, "w") as f: | ||
| 321 | f.write(wdata) | ||
| 335 | 322 | ||
| 336 | 323 | ||
| 337 | def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None): | 324 | def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None): |
| 338 | """ This function will make unlocked-sigs.inc match the recipes in the | 325 | """ This function will make unlocked-sigs.inc match the recipes in the |
| 339 | workspace plus any extras we want unlocked. """ | 326 | workspace plus any extras we want unlocked. """ |
| 327 | import bb.utils | ||
| 340 | 328 | ||
| 341 | if not fixed_setup: | 329 | if not fixed_setup: |
| 342 | # Only need to write this out within the eSDK | 330 | # Only need to write this out within the eSDK |
| @@ -388,11 +376,13 @@ def check_prerelease_version(ver, operation): | |||
| 388 | 376 | ||
| 389 | def check_git_repo_dirty(repodir): | 377 | def check_git_repo_dirty(repodir): |
| 390 | """Check if a git repository is clean or not""" | 378 | """Check if a git repository is clean or not""" |
| 379 | import bb.process | ||
| 391 | stdout, _ = bb.process.run('git status --porcelain', cwd=repodir) | 380 | stdout, _ = bb.process.run('git status --porcelain', cwd=repodir) |
| 392 | return stdout | 381 | return stdout |
| 393 | 382 | ||
| 394 | def check_git_repo_op(srctree, ignoredirs=None): | 383 | def check_git_repo_op(srctree, ignoredirs=None): |
| 395 | """Check if a git repository is in the middle of a rebase""" | 384 | """Check if a git repository is in the middle of a rebase""" |
| 385 | import bb.process | ||
| 396 | stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree) | 386 | stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree) |
| 397 | topleveldir = stdout.strip() | 387 | topleveldir = stdout.strip() |
| 398 | if ignoredirs and topleveldir in ignoredirs: | 388 | if ignoredirs and topleveldir in ignoredirs: |
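A recurring change in this file is narrowing `import bb` to the exact submodule (`bb.process`, `bb.providers`, `bb.utils`, ...) inside the function that needs it, so merely importing devtool's modules does not pull in a full bitbake environment. The shape of the pattern, mirroring check_git_repo_dirty() above:

```python
def check_git_repo_dirty(repodir):
    # Import the specific submodule at call time; 'import bb' alone is not
    # guaranteed to make bb.process available.
    import bb.process
    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
    return stdout
```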
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py index 935ffab46c..0b2c3d33dc 100644 --- a/scripts/lib/devtool/build.py +++ b/scripts/lib/devtool/build.py | |||
| @@ -49,7 +49,7 @@ def build(args, config, basepath, workspace): | |||
| 49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) | 49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) |
| 50 | if not rd: | 50 | if not rd: |
| 51 | return 1 | 51 | return 1 |
| 52 | deploytask = 'do_deploy' in rd.getVar('__BBTASKS') | 52 | deploytask = 'do_deploy' in bb.build.listtasks(rd) |
| 53 | finally: | 53 | finally: |
| 54 | tinfoil.shutdown() | 54 | tinfoil.shutdown() |
| 55 | 55 | ||
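The build plugin now obtains the task list through the public bb.build.listtasks() helper instead of reading the private __BBTASKS variable from the datastore. A sketch of the call (rd is a parsed recipe datastore, as elsewhere in devtool):

```python
def has_deploy_task(rd):
    import bb.build  # requires bitbake's lib/ on sys.path, as in devtool
    return "do_deploy" in bb.build.listtasks(rd)
```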
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py index 1cd4831d2b..990303982c 100644 --- a/scripts/lib/devtool/build_sdk.py +++ b/scripts/lib/devtool/build_sdk.py | |||
| @@ -5,14 +5,7 @@ | |||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
| 6 | # | 6 | # |
| 7 | 7 | ||
| 8 | import os | ||
| 9 | import subprocess | ||
| 10 | import logging | 8 | import logging |
| 11 | import glob | ||
| 12 | import shutil | ||
| 13 | import errno | ||
| 14 | import sys | ||
| 15 | import tempfile | ||
| 16 | from devtool import DevtoolError | 9 | from devtool import DevtoolError |
| 17 | from devtool import build_image | 10 | from devtool import build_image |
| 18 | 11 | ||
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py index a62b93224e..ee5bb57265 100644 --- a/scripts/lib/devtool/ide_plugins/ide_code.py +++ b/scripts/lib/devtool/ide_plugins/ide_code.py | |||
| @@ -161,7 +161,6 @@ class IdeVSCode(IdeBase): | |||
| 161 | if modified_recipe.build_tool is not BuildTool.CMAKE: | 161 | if modified_recipe.build_tool is not BuildTool.CMAKE: |
| 162 | return | 162 | return |
| 163 | recommendations += [ | 163 | recommendations += [ |
| 164 | "twxs.cmake", | ||
| 165 | "ms-vscode.cmake-tools", | 164 | "ms-vscode.cmake-tools", |
| 166 | "ms-vscode.cpptools", | 165 | "ms-vscode.cpptools", |
| 167 | "ms-vscode.cpptools-extension-pack", | 166 | "ms-vscode.cpptools-extension-pack", |
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py index 65873b088d..d9b54f7991 100755 --- a/scripts/lib/devtool/ide_sdk.py +++ b/scripts/lib/devtool/ide_sdk.py | |||
| @@ -167,7 +167,7 @@ class RecipeImage: | |||
| 167 | self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') | 167 | self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') |
| 168 | 168 | ||
| 169 | self.gdbserver_missing = 'gdbserver' not in image_d.getVar( | 169 | self.gdbserver_missing = 'gdbserver' not in image_d.getVar( |
| 170 | 'IMAGE_INSTALL') | 170 | 'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES') |
| 171 | 171 | ||
| 172 | @property | 172 | @property |
| 173 | def debug_support(self): | 173 | def debug_support(self): |
| @@ -286,8 +286,10 @@ class RecipeModified: | |||
| 286 | self.b = None | 286 | self.b = None |
| 287 | self.base_libdir = None | 287 | self.base_libdir = None |
| 288 | self.bblayers = None | 288 | self.bblayers = None |
| 289 | self.bitbakepath = None | ||
| 289 | self.bpn = None | 290 | self.bpn = None |
| 290 | self.d = None | 291 | self.d = None |
| 292 | self.debug_build = None | ||
| 291 | self.fakerootcmd = None | 293 | self.fakerootcmd = None |
| 292 | self.fakerootenv = None | 294 | self.fakerootenv = None |
| 293 | self.libdir = None | 295 | self.libdir = None |
| @@ -296,6 +298,7 @@ class RecipeModified: | |||
| 296 | self.package_debug_split_style = None | 298 | self.package_debug_split_style = None |
| 297 | self.path = None | 299 | self.path = None |
| 298 | self.pn = None | 300 | self.pn = None |
| 301 | self.recipe_id = None | ||
| 299 | self.recipe_sysroot = None | 302 | self.recipe_sysroot = None |
| 300 | self.recipe_sysroot_native = None | 303 | self.recipe_sysroot_native = None |
| 301 | self.staging_incdir = None | 304 | self.staging_incdir = None |
| @@ -304,7 +307,6 @@ class RecipeModified: | |||
| 304 | self.target_dbgsrc_dir = None | 307 | self.target_dbgsrc_dir = None |
| 305 | self.topdir = None | 308 | self.topdir = None |
| 306 | self.workdir = None | 309 | self.workdir = None |
| 307 | self.recipe_id = None | ||
| 308 | # replicate bitbake build environment | 310 | # replicate bitbake build environment |
| 309 | self.exported_vars = None | 311 | self.exported_vars = None |
| 310 | self.cmd_compile = None | 312 | self.cmd_compile = None |
| @@ -333,7 +335,7 @@ class RecipeModified: | |||
| 333 | self.srctree = workspace[workspacepn]['srctree'] | 335 | self.srctree = workspace[workspacepn]['srctree'] |
| 334 | # Need to grab this here in case the source is within a subdirectory | 336 | # Need to grab this here in case the source is within a subdirectory |
| 335 | self.real_srctree = get_real_srctree( | 337 | self.real_srctree = get_real_srctree( |
| 336 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR')) | 338 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR')) |
| 337 | self.bbappend = workspace[workspacepn]['bbappend'] | 339 | self.bbappend = workspace[workspacepn]['bbappend'] |
| 338 | 340 | ||
| 339 | self.ide_sdk_dir = os.path.join( | 341 | self.ide_sdk_dir = os.path.join( |
| @@ -345,9 +347,11 @@ class RecipeModified: | |||
| 345 | self.b = recipe_d.getVar('B') | 347 | self.b = recipe_d.getVar('B') |
| 346 | self.base_libdir = recipe_d.getVar('base_libdir') | 348 | self.base_libdir = recipe_d.getVar('base_libdir') |
| 347 | self.bblayers = recipe_d.getVar('BBLAYERS').split() | 349 | self.bblayers = recipe_d.getVar('BBLAYERS').split() |
| 350 | self.bitbakepath = recipe_d.getVar('BITBAKEPATH') | ||
| 348 | self.bpn = recipe_d.getVar('BPN') | 351 | self.bpn = recipe_d.getVar('BPN') |
| 349 | self.cxx = recipe_d.getVar('CXX') | 352 | self.cxx = recipe_d.getVar('CXX') |
| 350 | self.d = recipe_d.getVar('D') | 353 | self.d = recipe_d.getVar('D') |
| 354 | self.debug_build = recipe_d.getVar('DEBUG_BUILD') | ||
| 351 | self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') | 355 | self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') |
| 352 | self.fakerootenv = recipe_d.getVar('FAKEROOTENV') | 356 | self.fakerootenv = recipe_d.getVar('FAKEROOTENV') |
| 353 | self.libdir = recipe_d.getVar('libdir') | 357 | self.libdir = recipe_d.getVar('libdir') |
| @@ -389,17 +393,6 @@ class RecipeModified: | |||
| 389 | self.recipe_id = self.bpn + "-" + self.package_arch | 393 | self.recipe_id = self.bpn + "-" + self.package_arch |
| 390 | self.recipe_id_pretty = self.bpn + ": " + self.package_arch | 394 | self.recipe_id_pretty = self.bpn + ": " + self.package_arch |
| 391 | 395 | ||
| 392 | def append_to_bbappend(self, append_text): | ||
| 393 | with open(self.bbappend, 'a') as bbap: | ||
| 394 | bbap.write(append_text) | ||
| 395 | |||
| 396 | def remove_from_bbappend(self, append_text): | ||
| 397 | with open(self.bbappend, 'r') as bbap: | ||
| 398 | text = bbap.read() | ||
| 399 | new_text = text.replace(append_text, '') | ||
| 400 | with open(self.bbappend, 'w') as bbap: | ||
| 401 | bbap.write(new_text) | ||
| 402 | |||
| 403 | @staticmethod | 396 | @staticmethod |
| 404 | def is_valid_shell_variable(var): | 397 | def is_valid_shell_variable(var): |
| 405 | """Skip strange shell variables like systemd | 398 | """Skip strange shell variables like systemd |
| @@ -412,34 +405,6 @@ class RecipeModified: | |||
| 412 | return True | 405 | return True |
| 413 | return False | 406 | return False |
| 414 | 407 | ||
| 415 | def debug_build_config(self, args): | ||
| 416 | """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise""" | ||
| 417 | if self.build_tool is BuildTool.CMAKE: | ||
| 418 | append_text = os.linesep + \ | ||
| 419 | 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep | ||
| 420 | if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: | ||
| 421 | self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = { | ||
| 422 | "type": "STRING", | ||
| 423 | "value": "Debug", | ||
| 424 | } | ||
| 425 | self.append_to_bbappend(append_text) | ||
| 426 | elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: | ||
| 427 | del self.cmake_cache_vars['CMAKE_BUILD_TYPE'] | ||
| 428 | self.remove_from_bbappend(append_text) | ||
| 429 | elif self.build_tool is BuildTool.MESON: | ||
| 430 | append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep | ||
| 431 | if args.debug_build_config and self.meson_buildtype != "debug": | ||
| 432 | self.mesonopts.replace( | ||
| 433 | '--buildtype ' + self.meson_buildtype, '--buildtype debug') | ||
| 434 | self.append_to_bbappend(append_text) | ||
| 435 | elif self.meson_buildtype == "debug": | ||
| 436 | self.mesonopts.replace( | ||
| 437 | '--buildtype debug', '--buildtype plain') | ||
| 438 | self.remove_from_bbappend(append_text) | ||
| 439 | elif args.debug_build_config: | ||
| 440 | logger.warn( | ||
| 441 | "--debug-build-config is not implemented for this build tool yet.") | ||
| 442 | |||
| 443 | def solib_search_path(self, image): | 408 | def solib_search_path(self, image): |
| 444 | """Search for debug symbols in the rootfs and rootfs-dbg | 409 | """Search for debug symbols in the rootfs and rootfs-dbg |
| 445 | 410 | ||
| @@ -493,7 +458,7 @@ class RecipeModified: | |||
| 493 | 458 | ||
| 494 | vars = (key for key in d.keys() if not key.startswith( | 459 | vars = (key for key in d.keys() if not key.startswith( |
| 495 | "__") and not d.getVarFlag(key, "func", False)) | 460 | "__") and not d.getVarFlag(key, "func", False)) |
| 496 | for var in vars: | 461 | for var in sorted(vars): |
| 497 | func = d.getVarFlag(var, "func", False) | 462 | func = d.getVarFlag(var, "func", False) |
| 498 | if d.getVarFlag(var, 'python', False) and func: | 463 | if d.getVarFlag(var, 'python', False) and func: |
| 499 | continue | 464 | continue |
| @@ -545,7 +510,7 @@ class RecipeModified: | |||
| 545 | cache_vars = {} | 510 | cache_vars = {} |
| 546 | oecmake_args = d.getVar('OECMAKE_ARGS').split() | 511 | oecmake_args = d.getVar('OECMAKE_ARGS').split() |
| 547 | extra_oecmake = d.getVar('EXTRA_OECMAKE').split() | 512 | extra_oecmake = d.getVar('EXTRA_OECMAKE').split() |
| 548 | for param in oecmake_args + extra_oecmake: | 513 | for param in sorted(oecmake_args + extra_oecmake): |
| 549 | d_pref = "-D" | 514 | d_pref = "-D" |
| 550 | if param.startswith(d_pref): | 515 | if param.startswith(d_pref): |
| 551 | param = param[len(d_pref):] | 516 | param = param[len(d_pref):] |
| @@ -712,42 +677,6 @@ class RecipeModified: | |||
| 712 | binaries.append(abs_name[d_len:]) | 677 | binaries.append(abs_name[d_len:]) |
| 713 | return sorted(binaries) | 678 | return sorted(binaries) |
| 714 | 679 | ||
| 715 | def gen_delete_package_dirs(self): | ||
| 716 | """delete folders of package tasks | ||
| 717 | |||
| 718 | This is a workaround for an issue with recipes having their sources | ||
| 719 | downloaded as file:// | ||
| 720 | This likely breaks pseudo like: | ||
| 721 | path mismatch [3 links]: ino 79147802 db | ||
| 722 | .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/ | ||
| 723 | cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp | ||
| 724 | .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp | ||
| 725 | Since the files are outdated anyway, let's delete them (also from pseudo's db) to work around this issue. | ||
| 726 | """ | ||
| 727 | cmd_lines = ['#!/bin/sh'] | ||
| 728 | |||
| 729 | # Set up the appropriate environment | ||
| 730 | newenv = dict(os.environ) | ||
| 731 | for varvalue in self.fakerootenv.split(): | ||
| 732 | if '=' in varvalue: | ||
| 733 | splitval = varvalue.split('=', 1) | ||
| 734 | newenv[splitval[0]] = splitval[1] | ||
| 735 | |||
| 736 | # Replicate the environment variables from bitbake | ||
| 737 | for var, val in newenv.items(): | ||
| 738 | if not RecipeModified.is_valid_shell_variable(var): | ||
| 739 | continue | ||
| 740 | cmd_lines.append('%s="%s"' % (var, val)) | ||
| 741 | cmd_lines.append('export %s' % var) | ||
| 742 | |||
| 743 | # Delete the folders | ||
| 744 | pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [ | ||
| 745 | "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]]) | ||
| 746 | cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs) | ||
| 747 | cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd)) | ||
| 748 | |||
| 749 | return self.write_script(cmd_lines, 'delete_package_dirs') | ||
| 750 | |||
| 751 | def gen_deploy_target_script(self, args): | 680 | def gen_deploy_target_script(self, args): |
| 752 | """Generate a script which does what devtool deploy-target does | 681 | """Generate a script which does what devtool deploy-target does |
| 753 | 682 | ||
| @@ -783,16 +712,15 @@ class RecipeModified: | |||
| 783 | 712 | ||
| 784 | def gen_install_deploy_script(self, args): | 713 | def gen_install_deploy_script(self, args): |
| 785 | """Generate a script which does install and deploy""" | 714 | """Generate a script which does install and deploy""" |
| 786 | cmd_lines = ['#!/bin/bash'] | 715 | cmd_lines = ['#!/bin/sh'] |
| 787 | |||
| 788 | cmd_lines.append(self.gen_delete_package_dirs()) | ||
| 789 | 716 | ||
| 790 | # . oe-init-build-env $BUILDDIR | 717 | # . oe-init-build-env $BUILDDIR $BITBAKEDIR |
| 791 | # Note: Sourcing scripts with arguments requires bash | 718 | # Using 'set' to pass the build directory to oe-init-build-env in sh syntax |
| 792 | cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( | 719 | cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( |
| 793 | self.oe_init_dir, self.oe_init_dir)) | 720 | self.oe_init_dir, self.oe_init_dir)) |
| 794 | cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % ( | 721 | cmd_lines.append('set %s %s' % (self.topdir, self.bitbakepath.rstrip('/bin'))) |
| 795 | self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir)) | 722 | cmd_lines.append('. "%s" || { echo ". %s %s failed"; exit 1; }' % ( |
| 723 | self.oe_init_build_env, self.oe_init_build_env, self.topdir)) | ||
| 796 | 724 | ||
| 797 | # bitbake -c install | 725 | # bitbake -c install |
| 798 | cmd_lines.append( | 726 | cmd_lines.append( |
| @@ -988,6 +916,13 @@ def ide_setup(args, config, basepath, workspace): | |||
| 988 | recipe_modified.gen_meson_wrapper() | 916 | recipe_modified.gen_meson_wrapper() |
| 989 | ide.setup_modified_recipe( | 917 | ide.setup_modified_recipe( |
| 990 | args, recipe_image, recipe_modified) | 918 | args, recipe_image, recipe_modified) |
| 919 | |||
| 920 | if recipe_modified.debug_build != '1': | ||
| 921 | logger.warning( | ||
| 922 | 'Recipe %s is compiled with release build configuration. ' | ||
| 923 | 'You might want to add DEBUG_BUILD = "1" to %s. ' | ||
| 924 | 'Note that devtool modify --debug-build can do this automatically.', | ||
| 925 | recipe_modified.name, recipe_modified.bbappend) | ||
| 991 | else: | 926 | else: |
| 992 | raise DevtoolError("Must not end up here.") | 927 | raise DevtoolError("Must not end up here.") |
| 993 | 928 | ||
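The new warning above keys off DEBUG_BUILD because OE-core selects the compiler optimization set from it. A minimal sketch of that selection logic, with illustrative flag values rather than ones read from a real build:

    import logging

    logging.basicConfig()
    logger = logging.getLogger('devtool')

    # Illustrative stand-ins for OE-core's FULL_OPTIMIZATION/DEBUG_OPTIMIZATION;
    # the real values come from bitbake.conf plus distro/recipe overrides.
    FULL_OPTIMIZATION = "-O2 -pipe"
    DEBUG_OPTIMIZATION = "-Og -g -feliminate-unused-debug-types -pipe"

    def selected_optimization(debug_build):
        # DEBUG_BUILD = "1" switches the selected flags to the debug set,
        # which is what the warning above nudges the user towards.
        return DEBUG_OPTIMIZATION if debug_build == '1' else FULL_OPTIMIZATION

    if selected_optimization('0') == FULL_OPTIMIZATION:
        logger.warning('release build configuration; consider DEBUG_BUILD = "1"')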
| @@ -995,6 +930,15 @@ def ide_setup(args, config, basepath, workspace): | |||
| 995 | def register_commands(subparsers, context): | 930 | def register_commands(subparsers, context): |
| 996 | """Register devtool subcommands from this plugin""" | 931 | """Register devtool subcommands from this plugin""" |
| 997 | 932 | ||
| 933 | # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE | ||
| 934 | # configuration is generated. In the case of the eSDK, the bootstrapping is performed | ||
| 935 | # during the installation of the eSDK installer. Running the ide-sdk plugin from an | ||
| 936 | # eSDK installer-based setup would require skipping the bootstrapping and probably | ||
| 937 | # taking some other differences into account when generating the IDE configurations. | ||
| 938 | # This would be possible but is not implemented. | ||
| 939 | if context.fixed_setup: | ||
| 940 | return | ||
| 941 | |||
| 998 | global ide_plugins | 942 | global ide_plugins |
| 999 | 943 | ||
| 1000 | # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. | 944 | # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. |
| @@ -1015,7 +959,7 @@ def register_commands(subparsers, context): | |||
| 1015 | help='Setup the SDK and configure the IDE') | 959 | help='Setup the SDK and configure the IDE') |
| 1016 | parser_ide_sdk.add_argument( | 960 | parser_ide_sdk.add_argument( |
| 1017 | 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n' | 961 | 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n' |
| 1018 | 'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.') | 962 | 'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.') |
| 1019 | parser_ide_sdk.add_argument( | 963 | parser_ide_sdk.add_argument( |
| 1020 | '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, | 964 | '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, |
| 1021 | help='Different SDK types are supported:\n' | 965 | help='Different SDK types are supported:\n' |
| @@ -1065,6 +1009,4 @@ def register_commands(subparsers, context): | |||
| 1065 | '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') | 1009 | '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') |
| 1066 | parser_ide_sdk.add_argument( | 1010 | parser_ide_sdk.add_argument( |
| 1067 | '--no-check-space', help='Do not check for available space before deploying', action='store_true') | 1011 | '--no-check-space', help='Do not check for available space before deploying', action='store_true') |
| 1068 | parser_ide_sdk.add_argument( | ||
| 1069 | '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true') | ||
| 1070 | parser_ide_sdk.set_defaults(func=ide_setup) | 1012 | parser_ide_sdk.set_defaults(func=ide_setup) |
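On the switch from bash to sh in gen_install_deploy_script: POSIX sh cannot pass arguments while sourcing a script, so the generator now sets the positional parameters with 'set' before the '.' line. A hedged Python sketch of that preamble builder (paths are placeholders, not taken from a real build):

    def gen_sh_preamble(oe_init_dir, oe_init_build_env, topdir, bitbakepath):
        lines = ['#!/bin/sh']
        lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (oe_init_dir, oe_init_dir))
        # oe-init-build-env picks up $1 (build dir) and $2 (bitbake checkout)
        # when sourced; plain sh has no other way to pass them with '.'.
        # An explicit suffix check is used instead of rstrip('/bin'), since
        # str.rstrip strips a character set rather than a literal suffix.
        bitbakedir = bitbakepath[:-len('/bin')] if bitbakepath.endswith('/bin') else bitbakepath
        lines.append('set %s %s' % (topdir, bitbakedir))
        lines.append('. "%s" || { echo ". %s failed"; exit 1; }' % (oe_init_build_env, oe_init_build_env))
        return lines

    print('\n'.join(gen_sh_preamble('/work/poky', '/work/poky/oe-init-build-env',
                                    '/work/build', '/work/poky/bitbake/bin')))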
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 18daef30c3..1054960551 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py | |||
| @@ -23,9 +23,6 @@ | |||
| 23 | import os | 23 | import os |
| 24 | import bb | 24 | import bb |
| 25 | import logging | 25 | import logging |
| 26 | import argparse | ||
| 27 | import re | ||
| 28 | import glob | ||
| 29 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command | 26 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command |
| 30 | from devtool import check_workspace_recipe | 27 | from devtool import check_workspace_recipe |
| 31 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
| @@ -34,7 +31,6 @@ def menuconfig(args, config, basepath, workspace): | |||
| 34 | """Entry point for the devtool 'menuconfig' subcommand""" | 31 | """Entry point for the devtool 'menuconfig' subcommand""" |
| 35 | 32 | ||
| 36 | rd = "" | 33 | rd = "" |
| 37 | kconfigpath = "" | ||
| 38 | pn_src = "" | 34 | pn_src = "" |
| 39 | localfilesdir = "" | 35 | localfilesdir = "" |
| 40 | workspace_dir = "" | 36 | workspace_dir = "" |
| @@ -51,7 +47,6 @@ def menuconfig(args, config, basepath, workspace): | |||
| 51 | raise DevtoolError("This recipe does not support menuconfig option") | 47 | raise DevtoolError("This recipe does not support menuconfig option") |
| 52 | 48 | ||
| 53 | workspace_dir = os.path.join(config.workspace_path,'sources') | 49 | workspace_dir = os.path.join(config.workspace_path,'sources') |
| 54 | kconfigpath = rd.getVar('B') | ||
| 55 | pn_src = os.path.join(workspace_dir,pn) | 50 | pn_src = os.path.join(workspace_dir,pn) |
| 56 | 51 | ||
| 57 | # add check to see if oe_local_files exists or not | 52 | # add check to see if oe_local_files exists or not |
| @@ -70,7 +65,7 @@ def menuconfig(args, config, basepath, workspace): | |||
| 70 | logger.info('Launching menuconfig') | 65 | logger.info('Launching menuconfig') |
| 71 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) | 66 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) |
| 72 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') | 67 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') |
| 73 | res = standard._create_kconfig_diff(pn_src,rd,fragment) | 68 | standard._create_kconfig_diff(pn_src,rd,fragment) |
| 74 | 69 | ||
| 75 | return 0 | 70 | return 0 |
| 76 | 71 | ||
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 05161942b7..1fd5947c41 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
| @@ -18,11 +18,13 @@ import argparse_oe | |||
| 18 | import scriptutils | 18 | import scriptutils |
| 19 | import errno | 19 | import errno |
| 20 | import glob | 20 | import glob |
| 21 | import filecmp | ||
| 22 | from collections import OrderedDict | 21 | from collections import OrderedDict |
| 22 | |||
| 23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError | 23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError |
| 24 | from devtool import parse_recipe | 24 | from devtool import parse_recipe |
| 25 | 25 | ||
| 26 | import bb.utils | ||
| 27 | |||
| 26 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
| 27 | 29 | ||
| 28 | override_branch_prefix = 'devtool-override-' | 30 | override_branch_prefix = 'devtool-override-' |
| @@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-' | |||
| 30 | 32 | ||
| 31 | def add(args, config, basepath, workspace): | 33 | def add(args, config, basepath, workspace): |
| 32 | """Entry point for the devtool 'add' subcommand""" | 34 | """Entry point for the devtool 'add' subcommand""" |
| 33 | import bb | 35 | import bb.data |
| 36 | import bb.process | ||
| 34 | import oe.recipeutils | 37 | import oe.recipeutils |
| 35 | 38 | ||
| 36 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: | 39 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: |
| @@ -206,7 +209,7 @@ def add(args, config, basepath, workspace): | |||
| 206 | for fn in os.listdir(tempdir): | 209 | for fn in os.listdir(tempdir): |
| 207 | shutil.move(os.path.join(tempdir, fn), recipedir) | 210 | shutil.move(os.path.join(tempdir, fn), recipedir) |
| 208 | else: | 211 | else: |
| 209 | raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) | 212 | raise DevtoolError(f'Failed to create a recipe file for source {source}') |
| 210 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) | 213 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) |
| 211 | if os.path.exists(attic_recipe): | 214 | if os.path.exists(attic_recipe): |
| 212 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) | 215 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) |
| @@ -305,6 +308,7 @@ def add(args, config, basepath, workspace): | |||
| 305 | 308 | ||
| 306 | def _check_compatible_recipe(pn, d): | 309 | def _check_compatible_recipe(pn, d): |
| 307 | """Check if the recipe is supported by devtool""" | 310 | """Check if the recipe is supported by devtool""" |
| 311 | import bb.data | ||
| 308 | if pn == 'perf': | 312 | if pn == 'perf': |
| 309 | raise DevtoolError("The perf recipe does not actually check out " | 313 | raise DevtoolError("The perf recipe does not actually check out " |
| 310 | "source and thus cannot be supported by this tool", | 314 | "source and thus cannot be supported by this tool", |
| @@ -374,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
| 374 | 378 | ||
| 375 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | 379 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): |
| 376 | """List contents of a git treeish""" | 380 | """List contents of a git treeish""" |
| 377 | import bb | 381 | import bb.process |
| 378 | cmd = ['git', 'ls-tree', '-z', treeish] | 382 | cmd = ['git', 'ls-tree', '-z', treeish] |
| 379 | if recursive: | 383 | if recursive: |
| 380 | cmd.append('-r') | 384 | cmd.append('-r') |
| @@ -387,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | |||
| 387 | ret[split[3]] = split[0:3] | 391 | ret[split[3]] = split[0:3] |
| 388 | return ret | 392 | return ret |
| 389 | 393 | ||
| 394 | def _git_modified(repodir): | ||
| 395 | """List the difference between HEAD and the index""" | ||
| 396 | import bb.process | ||
| 397 | cmd = ['git', 'status', '--porcelain'] | ||
| 398 | out, _ = bb.process.run(cmd, cwd=repodir) | ||
| 399 | ret = [] | ||
| 400 | if out: | ||
| 401 | for line in out.split("\n"): | ||
| 402 | if line and not line.startswith('??'): | ||
| 403 | ret.append(line[3:]) | ||
| 404 | return ret | ||
| 405 | |||
| 406 | |||
| 390 | def _git_exclude_path(srctree, path): | 407 | def _git_exclude_path(srctree, path): |
| 391 | """Return pathspec (list of paths) that excludes certain path""" | 408 | """Return pathspec (list of paths) that excludes certain path""" |
| 392 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - | 409 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - |
| @@ -414,8 +431,6 @@ def _ls_tree(directory): | |||
| 414 | 431 | ||
| 415 | def extract(args, config, basepath, workspace): | 432 | def extract(args, config, basepath, workspace): |
| 416 | """Entry point for the devtool 'extract' subcommand""" | 433 | """Entry point for the devtool 'extract' subcommand""" |
| 417 | import bb | ||
| 418 | |||
| 419 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 434 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
| 420 | if not tinfoil: | 435 | if not tinfoil: |
| 421 | # Error already shown | 436 | # Error already shown |
| @@ -438,8 +453,6 @@ def extract(args, config, basepath, workspace): | |||
| 438 | 453 | ||
| 439 | def sync(args, config, basepath, workspace): | 454 | def sync(args, config, basepath, workspace): |
| 440 | """Entry point for the devtool 'sync' subcommand""" | 455 | """Entry point for the devtool 'sync' subcommand""" |
| 441 | import bb | ||
| 442 | |||
| 443 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 456 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
| 444 | if not tinfoil: | 457 | if not tinfoil: |
| 445 | # Error already shown | 458 | # Error already shown |
| @@ -460,37 +473,11 @@ def sync(args, config, basepath, workspace): | |||
| 460 | finally: | 473 | finally: |
| 461 | tinfoil.shutdown() | 474 | tinfoil.shutdown() |
| 462 | 475 | ||
| 463 | def symlink_oelocal_files_srctree(rd, srctree): | ||
| 464 | import oe.patch | ||
| 465 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | ||
| 466 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | ||
| 467 | # (otherwise the recipe won't build as expected) | ||
| 468 | local_files_dir = os.path.join(srctree, 'oe-local-files') | ||
| 469 | addfiles = [] | ||
| 470 | for root, _, files in os.walk(local_files_dir): | ||
| 471 | relpth = os.path.relpath(root, local_files_dir) | ||
| 472 | if relpth != '.': | ||
| 473 | bb.utils.mkdirhier(os.path.join(srctree, relpth)) | ||
| 474 | for fn in files: | ||
| 475 | if fn == '.gitignore': | ||
| 476 | continue | ||
| 477 | destpth = os.path.join(srctree, relpth, fn) | ||
| 478 | if os.path.exists(destpth): | ||
| 479 | os.unlink(destpth) | ||
| 480 | if relpth != '.': | ||
| 481 | back_relpth = os.path.relpath(local_files_dir, root) | ||
| 482 | os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) | ||
| 483 | else: | ||
| 484 | os.symlink('oe-local-files/%s' % fn, destpth) | ||
| 485 | addfiles.append(os.path.join(relpth, fn)) | ||
| 486 | if addfiles: | ||
| 487 | oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd) | ||
| 488 | |||
| 489 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 476 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
| 490 | """Extract sources of a recipe""" | 477 | """Extract sources of a recipe""" |
| 491 | import oe.recipeutils | ||
| 492 | import oe.patch | ||
| 493 | import oe.path | 478 | import oe.path |
| 479 | import bb.data | ||
| 480 | import bb.process | ||
| 494 | 481 | ||
| 495 | pn = d.getVar('PN') | 482 | pn = d.getVar('PN') |
| 496 | 483 | ||
| @@ -555,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
| 555 | tempbasedir = d.getVar('WORKDIR') | 542 | tempbasedir = d.getVar('WORKDIR') |
| 556 | bb.utils.mkdirhier(tempbasedir) | 543 | bb.utils.mkdirhier(tempbasedir) |
| 557 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) | 544 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) |
| 545 | appendbackup = None | ||
| 558 | try: | 546 | try: |
| 559 | tinfoil.logger.setLevel(logging.WARNING) | 547 | tinfoil.logger.setLevel(logging.WARNING) |
| 560 | 548 | ||
| @@ -565,7 +553,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
| 565 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') | 553 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') |
| 566 | shutil.copyfile(appendfile, appendbackup) | 554 | shutil.copyfile(appendfile, appendbackup) |
| 567 | else: | 555 | else: |
| 568 | appendbackup = None | ||
| 569 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 556 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
| 570 | logger.debug('writing append file %s' % appendfile) | 557 | logger.debug('writing append file %s' % appendfile) |
| 571 | with open(appendfile, 'a') as f: | 558 | with open(appendfile, 'a') as f: |
| @@ -638,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
| 638 | srcsubdir = f.read() | 625 | srcsubdir = f.read() |
| 639 | except FileNotFoundError as e: | 626 | except FileNotFoundError as e: |
| 640 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) | 627 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) |
| 641 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) | 628 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR')))) |
| 642 | 629 | ||
| 643 | # Check if work-shared is empty, if yes | 630 | # Check if work-shared is empty, if yes |
| 644 | # find source and copy to work-shared | 631 | # find source and copy to work-shared |
| @@ -657,9 +644,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
| 657 | elif not os.path.exists(workshareddir): | 644 | elif not os.path.exists(workshareddir): |
| 658 | oe.path.copyhardlinktree(srcsubdir, workshareddir) | 645 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
| 659 | 646 | ||
| 660 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | ||
| 661 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | ||
| 662 | |||
| 663 | if sync: | 647 | if sync: |
| 664 | try: | 648 | try: |
| 665 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | 649 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) |
| @@ -674,29 +658,8 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
| 674 | except bb.process.ExecutionError as e: | 658 | except bb.process.ExecutionError as e: |
| 675 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) | 659 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) |
| 676 | 660 | ||
| 677 | # Move the oe-local-files directory to srctree. | ||
| 678 | # As oe-local-files is not part of the constructed git tree, | ||
| 679 | # removing it directly during the synchronization might surprise | ||
| 680 | # the user. Instead, we move it to oe-local-files.bak and remind | ||
| 681 | # the user in the log message. | ||
| 682 | if os.path.exists(srctree_localdir + '.bak'): | ||
| 683 | shutil.rmtree(srctree_localdir + '.bak') | ||
| 684 | |||
| 685 | if os.path.exists(srctree_localdir): | ||
| 686 | logger.info('Backing up current local file directory %s' % srctree_localdir) | ||
| 687 | shutil.move(srctree_localdir, srctree_localdir + '.bak') | ||
| 688 | |||
| 689 | if os.path.exists(tempdir_localdir): | ||
| 690 | logger.info('Syncing local source files to srctree...') | ||
| 691 | shutil.copytree(tempdir_localdir, srctree_localdir) | ||
| 692 | else: | 661 | else: |
| 693 | # Move oe-local-files directory to srctree | ||
| 694 | if os.path.exists(tempdir_localdir): | ||
| 695 | logger.info('Adding local source files to srctree...') | ||
| 696 | shutil.move(tempdir_localdir, srcsubdir) | ||
| 697 | |||
| 698 | shutil.move(srcsubdir, srctree) | 662 | shutil.move(srcsubdir, srctree) |
| 699 | symlink_oelocal_files_srctree(d, srctree) | ||
| 700 | 663 | ||
| 701 | if is_kernel_yocto: | 664 | if is_kernel_yocto: |
| 702 | logger.info('Copying kernel config to srctree') | 665 | logger.info('Copying kernel config to srctree') |
| @@ -715,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
| 715 | 678 | ||
| 716 | def _add_md5(config, recipename, filename): | 679 | def _add_md5(config, recipename, filename): |
| 717 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" | 680 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" |
| 718 | import bb.utils | ||
| 719 | |||
| 720 | def addfile(fn): | 681 | def addfile(fn): |
| 721 | md5 = bb.utils.md5_file(fn) | 682 | md5 = bb.utils.md5_file(fn) |
| 722 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: | 683 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: |
| @@ -735,7 +696,6 @@ def _add_md5(config, recipename, filename): | |||
| 735 | def _check_preserve(config, recipename): | 696 | def _check_preserve(config, recipename): |
| 736 | """Check if a file was manually changed and needs to be saved in 'attic' | 697 | """Check if a file was manually changed and needs to be saved in 'attic' |
| 737 | directory""" | 698 | directory""" |
| 738 | import bb.utils | ||
| 739 | origfile = os.path.join(config.workspace_path, '.devtool_md5') | 699 | origfile = os.path.join(config.workspace_path, '.devtool_md5') |
| 740 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') | 700 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') |
| 741 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) | 701 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) |
| @@ -766,36 +726,36 @@ def _check_preserve(config, recipename): | |||
| 766 | 726 | ||
| 767 | def get_staging_kver(srcdir): | 727 | def get_staging_kver(srcdir): |
| 768 | # Kernel version from work-shared | 728 | # Kernel version from work-shared |
| 769 | kerver = [] | 729 | import itertools |
| 770 | staging_kerVer="" | 730 | try: |
| 771 | if os.path.exists(srcdir) and os.listdir(srcdir): | ||
| 772 | with open(os.path.join(srcdir, "Makefile")) as f: | 731 | with open(os.path.join(srcdir, "Makefile")) as f: |
| 773 | version = [next(f) for x in range(5)][1:4] | 732 | # Take VERSION, PATCHLEVEL, SUBLEVEL from the 2nd-4th lines (zero-based 1-3) |
| 774 | for word in version: | 733 | return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4)) |
| 775 | kerver.append(word.split('= ')[1].split('\n')[0]) | 734 | except FileNotFoundError: |
| 776 | staging_kerVer = ".".join(kerver) | 735 | return "" |
| 777 | return staging_kerVer | ||
| 778 | 736 | ||
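The rewritten get_staging_kver assumes the kernel's top-level Makefile carries VERSION, PATCHLEVEL and SUBLEVEL on its second through fourth lines (the first line being the SPDX tag), which is exactly what itertools.islice(f, 1, 4) selects. A standalone check against a synthetic Makefile:

    import io
    import itertools

    makefile = io.StringIO(
        "# SPDX-License-Identifier: GPL-2.0\n"
        "VERSION = 6\n"
        "PATCHLEVEL = 6\n"
        "SUBLEVEL = 23\n"
        "EXTRAVERSION =\n")

    # islice(f, 1, 4) yields the three version lines following the SPDX tag
    kver = ".".join(line.rstrip().split('= ')[1]
                    for line in itertools.islice(makefile, 1, 4))
    assert kver == "6.6.23"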
| 779 | def get_staging_kbranch(srcdir): | 737 | def get_staging_kbranch(srcdir): |
| 738 | import bb.process | ||
| 780 | staging_kbranch = "" | 739 | staging_kbranch = "" |
| 781 | if os.path.exists(srcdir) and os.listdir(srcdir): | 740 | if os.path.exists(srcdir) and os.listdir(srcdir): |
| 782 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) | 741 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) |
| 783 | staging_kbranch = "".join(branch.split('\n')[0]) | 742 | staging_kbranch = "".join(branch.split('\n')[0]) |
| 784 | return staging_kbranch | 743 | return staging_kbranch |
| 785 | 744 | ||
| 786 | def get_real_srctree(srctree, s, workdir): | 745 | def get_real_srctree(srctree, s, unpackdir): |
| 787 | # Check that recipe isn't using a shared workdir | 746 | # Check that recipe isn't using a shared workdir |
| 788 | s = os.path.abspath(s) | 747 | s = os.path.abspath(s) |
| 789 | workdir = os.path.abspath(workdir) | 748 | unpackdir = os.path.abspath(unpackdir) |
| 790 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | 749 | if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir: |
| 791 | # Handle if S is set to a subdirectory of the source | 750 | # Handle if S is set to a subdirectory of the source |
| 792 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | 751 | srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1] |
| 793 | srctree = os.path.join(srctree, srcsubdir) | 752 | srctree = os.path.join(srctree, srcsubdir) |
| 794 | return srctree | 753 | return srctree |
| 795 | 754 | ||
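Renaming the parameter from workdir to unpackdir tracks the WORKDIR/UNPACKDIR split, but the path arithmetic is unchanged: when S sits more than one level below UNPACKDIR, everything after the first component is appended to srctree. A quick illustration with invented paths:

    import os

    def get_real_srctree(srctree, s, unpackdir):
        s = os.path.abspath(s)
        unpackdir = os.path.abspath(unpackdir)
        if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
            srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
            srctree = os.path.join(srctree, srcsubdir)
        return srctree

    # S two levels below UNPACKDIR: the trailing 'src' carries over into srctree
    assert get_real_srctree('/ws/foo', '/tmp/unpack/foo-1.0/src', '/tmp/unpack') == '/ws/foo/src'
    # S directly below UNPACKDIR: srctree is returned unchanged
    assert get_real_srctree('/ws/foo', '/tmp/unpack/foo-1.0', '/tmp/unpack') == '/ws/foo'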
| 796 | def modify(args, config, basepath, workspace): | 755 | def modify(args, config, basepath, workspace): |
| 797 | """Entry point for the devtool 'modify' subcommand""" | 756 | """Entry point for the devtool 'modify' subcommand""" |
| 798 | import bb | 757 | import bb.data |
| 758 | import bb.process | ||
| 799 | import oe.recipeutils | 759 | import oe.recipeutils |
| 800 | import oe.patch | 760 | import oe.patch |
| 801 | import oe.path | 761 | import oe.path |
| @@ -851,35 +811,21 @@ def modify(args, config, basepath, workspace): | |||
| 851 | staging_kbranch = get_staging_kbranch(srcdir) | 811 | staging_kbranch = get_staging_kbranch(srcdir) |
| 852 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 812 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
| 853 | oe.path.copyhardlinktree(srcdir, srctree) | 813 | oe.path.copyhardlinktree(srcdir, srctree) |
| 854 | workdir = rd.getVar('WORKDIR') | 814 | unpackdir = rd.getVar('UNPACKDIR') |
| 855 | srcsubdir = rd.getVar('S') | 815 | srcsubdir = rd.getVar('S') |
| 856 | localfilesdir = os.path.join(srctree, 'oe-local-files') | ||
| 857 | # Move local source files into separate subdir | ||
| 858 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
| 859 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
| 860 | 816 | ||
| 861 | for key in local_files.copy(): | 817 | # Add locally copied files to gitignore as we add back to the metadata directly |
| 862 | if key.endswith('scc'): | 818 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
| 863 | sccfile = open(local_files[key], 'r') | ||
| 864 | for l in sccfile: | ||
| 865 | line = l.split() | ||
| 866 | if line and line[0] in ('kconf', 'patch'): | ||
| 867 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
| 868 | if not cfg in local_files.values(): | ||
| 869 | local_files[line[-1]] = cfg | ||
| 870 | shutil.copy2(cfg, workdir) | ||
| 871 | sccfile.close() | ||
| 872 | |||
| 873 | # Ignore local files with subdir={BP} | ||
| 874 | srcabspath = os.path.abspath(srcsubdir) | 819 | srcabspath = os.path.abspath(srcsubdir) |
| 875 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 820 | local_files = [fname for fname in local_files if |
| 821 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
| 822 | srcabspath == unpackdir] | ||
| 876 | if local_files: | 823 | if local_files: |
| 877 | for fname in local_files: | 824 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
| 878 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 825 | f.write('# Ignore local files, by default. Remove following lines' |
| 879 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 826 | 'if you want to commit the directory to Git\n') |
| 880 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') | 827 | for fname in local_files: |
| 881 | 828 | f.write('%s\n' % fname) | |
| 882 | symlink_oelocal_files_srctree(rd, srctree) | ||
| 883 | 829 | ||
| 884 | task = 'do_configure' | 830 | task = 'do_configure' |
| 885 | res = tinfoil.build_targets(pn, task, handle_events=True) | 831 | res = tinfoil.build_targets(pn, task, handle_events=True) |
| @@ -961,7 +907,7 @@ def modify(args, config, basepath, workspace): | |||
| 961 | 907 | ||
| 962 | # Need to grab this here in case the source is within a subdirectory | 908 | # Need to grab this here in case the source is within a subdirectory |
| 963 | srctreebase = srctree | 909 | srctreebase = srctree |
| 964 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
| 965 | 911 | ||
| 966 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
| 967 | with open(appendfile, 'w') as f: | 913 | with open(appendfile, 'w') as f: |
| @@ -1001,13 +947,6 @@ def modify(args, config, basepath, workspace): | |||
| 1001 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) | 947 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
| 1002 | 948 | ||
| 1003 | if bb.data.inherits_class('kernel', rd): | 949 | if bb.data.inherits_class('kernel', rd): |
| 1004 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | ||
| 1005 | 'do_fetch do_unpack do_kernel_configcheck"\n') | ||
| 1006 | f.write('\ndo_patch[noexec] = "1"\n') | ||
| 1007 | f.write('\ndo_configure:append() {\n' | ||
| 1008 | ' cp ${B}/.config ${S}/.config.baseline\n' | ||
| 1009 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | ||
| 1010 | '}\n') | ||
| 1011 | f.write('\ndo_kernel_configme:prepend() {\n' | 950 | f.write('\ndo_kernel_configme:prepend() {\n' |
| 1012 | ' if [ -e ${S}/.config ]; then\n' | 951 | ' if [ -e ${S}/.config ]; then\n' |
| 1013 | ' mv ${S}/.config ${S}/.config.old\n' | 952 | ' mv ${S}/.config ${S}/.config.old\n' |
| @@ -1031,6 +970,8 @@ def modify(args, config, basepath, workspace): | |||
| 1031 | if branch == args.branch: | 970 | if branch == args.branch: |
| 1032 | continue | 971 | continue |
| 1033 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) | 972 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) |
| 973 | if args.debug_build: | ||
| 974 | f.write('\nDEBUG_BUILD = "1"\n') | ||
| 1034 | 975 | ||
| 1035 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 976 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
| 1036 | 977 | ||
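With the new --debug-build switch, devtool modify simply ends the generated append with a DEBUG_BUILD line rather than requiring the old ide-sdk-specific option. A hedged sketch of the fragment writer (the append path here is a throwaway temp file):

    import tempfile

    def finish_append(appendfile, debug_build):
        # Mirrors the tail of modify(): the opt-in debug setting lands in the
        # workspace bbappend, so it only affects the recipe being modified.
        if debug_build:
            with open(appendfile, 'a') as f:
                f.write('\nDEBUG_BUILD = "1"\n')

    with tempfile.NamedTemporaryFile('w', suffix='.bbappend', delete=False) as tmp:
        appendfile = tmp.name
    finish_append(appendfile, True)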
| @@ -1075,6 +1016,7 @@ def rename(args, config, basepath, workspace): | |||
| 1075 | origfnver = '' | 1016 | origfnver = '' |
| 1076 | 1017 | ||
| 1077 | recipefilemd5 = None | 1018 | recipefilemd5 = None |
| 1019 | newrecipefilemd5 = None | ||
| 1078 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 1020 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
| 1079 | try: | 1021 | try: |
| 1080 | rd = parse_recipe(config, tinfoil, args.recipename, True) | 1022 | rd = parse_recipe(config, tinfoil, args.recipename, True) |
| @@ -1152,6 +1094,7 @@ def rename(args, config, basepath, workspace): | |||
| 1152 | 1094 | ||
| 1153 | # Rename source tree if it's the default path | 1095 | # Rename source tree if it's the default path |
| 1154 | appendmd5 = None | 1096 | appendmd5 = None |
| 1097 | newappendmd5 = None | ||
| 1155 | if not args.no_srctree: | 1098 | if not args.no_srctree: |
| 1156 | srctree = workspace[args.recipename]['srctree'] | 1099 | srctree = workspace[args.recipename]['srctree'] |
| 1157 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): | 1100 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): |
| @@ -1240,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
| 1240 | """Get initial and update rev of a recipe. These are the start point of the | 1183 | """Get initial and update rev of a recipe. These are the start point of the |
| 1241 | whole patchset and start point for the patches to be re-generated/updated. | 1184 | whole patchset and start point for the patches to be re-generated/updated. |
| 1242 | """ | 1185 | """ |
| 1243 | import bb | 1186 | import bb.process |
| 1244 | 1187 | ||
| 1245 | # Get current branch | 1188 | # Get current branch |
| 1246 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | 1189 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
| @@ -1366,6 +1309,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
| 1366 | """ | 1309 | """ |
| 1367 | import oe.recipeutils | 1310 | import oe.recipeutils |
| 1368 | from oe.patch import GitApplyTree | 1311 | from oe.patch import GitApplyTree |
| 1312 | import bb.process | ||
| 1369 | updated = OrderedDict() | 1313 | updated = OrderedDict() |
| 1370 | added = OrderedDict() | 1314 | added = OrderedDict() |
| 1371 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') | 1315 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') |
| @@ -1387,6 +1331,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
| 1387 | # values, but they ought to be anyway... | 1331 | # values, but they ought to be anyway... |
| 1388 | new_basename = seqpatch_re.match(new_patch).group(2) | 1332 | new_basename = seqpatch_re.match(new_patch).group(2) |
| 1389 | match_name = None | 1333 | match_name = None |
| 1334 | old_patch = None | ||
| 1390 | for old_patch in existing_patches: | 1335 | for old_patch in existing_patches: |
| 1391 | old_basename = seqpatch_re.match(old_patch).group(2) | 1336 | old_basename = seqpatch_re.match(old_patch).group(2) |
| 1392 | old_basename_splitext = os.path.splitext(old_basename) | 1337 | old_basename_splitext = os.path.splitext(old_basename) |
| @@ -1435,6 +1380,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
| 1435 | 1380 | ||
| 1436 | def _create_kconfig_diff(srctree, rd, outfile): | 1381 | def _create_kconfig_diff(srctree, rd, outfile): |
| 1437 | """Create a kconfig fragment""" | 1382 | """Create a kconfig fragment""" |
| 1383 | import bb.process | ||
| 1438 | # Only update config fragment if both config files exist | 1384 | # Only update config fragment if both config files exist |
| 1439 | orig_config = os.path.join(srctree, '.config.baseline') | 1385 | orig_config = os.path.join(srctree, '.config.baseline') |
| 1440 | new_config = os.path.join(srctree, '.config.new') | 1386 | new_config = os.path.join(srctree, '.config.new') |
| @@ -1472,12 +1418,15 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
| 1472 | - for removed dict, the absolute path to the existing file in recipe space | 1418 | - for removed dict, the absolute path to the existing file in recipe space |
| 1473 | """ | 1419 | """ |
| 1474 | import oe.recipeutils | 1420 | import oe.recipeutils |
| 1421 | import bb.data | ||
| 1422 | import bb.process | ||
| 1475 | 1423 | ||
| 1476 | # Find out local files (SRC_URI files that exist in the "recipe space"). | 1424 | # Find out local files (SRC_URI files that exist in the "recipe space"). |
| 1477 | # Local files that reside in srctree are not included in patch generation. | 1425 | # Local files that reside in srctree are not included in patch generation. |
| 1478 | # Instead they are directly copied over the original source files (in | 1426 | # Instead they are directly copied over the original source files (in |
| 1479 | # recipe space). | 1427 | # recipe space). |
| 1480 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1428 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
| 1429 | |||
| 1481 | new_set = None | 1430 | new_set = None |
| 1482 | updated = OrderedDict() | 1431 | updated = OrderedDict() |
| 1483 | added = OrderedDict() | 1432 | added = OrderedDict() |
| @@ -1494,24 +1443,28 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
| 1494 | if branchname.startswith(override_branch_prefix): | 1443 | if branchname.startswith(override_branch_prefix): |
| 1495 | return (updated, added, removed) | 1444 | return (updated, added, removed) |
| 1496 | 1445 | ||
| 1497 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1446 | files = _git_modified(srctree) |
| 1498 | git_files = _git_ls_tree(srctree) | 1447 | #if not files: |
| 1499 | if 'oe-local-files' in git_files: | 1448 | # files = _ls_tree(srctree) |
| 1500 | # If tracked by Git, take the files from srctree HEAD. First get | 1449 | for f in files: |
| 1501 | # the tree object of the directory | 1450 | fullfile = os.path.join(srctree, f) |
| 1502 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1451 | if os.path.exists(os.path.join(fullfile, ".git")): |
| 1503 | tree = git_files['oe-local-files'][2] | 1452 | # submodules handled elsewhere |
| 1504 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1453 | continue |
| 1505 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1454 | if f not in existing_files: |
| 1506 | GIT_INDEX_FILE=tmp_index)) | 1455 | added[f] = {} |
| 1507 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1456 | if os.path.isdir(os.path.join(srctree, f)): |
| 1508 | elif os.path.isdir(local_files_dir): | 1457 | shutil.copytree(fullfile, os.path.join(destdir, f)) |
| 1509 | # If not tracked by Git, just copy from working copy | 1458 | else: |
| 1510 | new_set = _ls_tree(local_files_dir) | 1459 | shutil.copy2(fullfile, os.path.join(destdir, f)) |
| 1511 | bb.process.run(['cp', '-ax', | 1460 | elif not os.path.exists(fullfile): |
| 1512 | os.path.join(local_files_dir, '.'), destdir]) | 1461 | removed[f] = existing_files[f] |
| 1513 | else: | 1462 | elif f in existing_files: |
| 1514 | new_set = [] | 1463 | updated[f] = {'path' : existing_files[f]} |
| 1464 | if os.path.isdir(os.path.join(srctree, f)): | ||
| 1465 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
| 1466 | else: | ||
| 1467 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
| 1515 | 1468 | ||
| 1516 | # Special handling for kernel config | 1469 | # Special handling for kernel config |
| 1517 | if bb.data.inherits_class('kernel-yocto', rd): | 1470 | if bb.data.inherits_class('kernel-yocto', rd): |
| @@ -1519,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
| 1519 | fragment_path = os.path.join(destdir, fragment_fn) | 1472 | fragment_path = os.path.join(destdir, fragment_fn) |
| 1520 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1473 | if _create_kconfig_diff(srctree, rd, fragment_path): |
| 1521 | if os.path.exists(fragment_path): | 1474 | if os.path.exists(fragment_path): |
| 1522 | if fragment_fn not in new_set: | 1475 | if fragment_fn in removed: |
| 1523 | new_set.append(fragment_fn) | 1476 | del removed[fragment_fn] |
| 1524 | # Copy fragment to local-files | 1477 | if fragment_fn not in updated and fragment_fn not in added: |
| 1525 | if os.path.isdir(local_files_dir): | 1478 | added[fragment_fn] = {} |
| 1526 | shutil.copy2(fragment_path, local_files_dir) | ||
| 1527 | else: | 1479 | else: |
| 1528 | if fragment_fn in new_set: | 1480 | if fragment_fn in updated: |
| 1529 | new_set.remove(fragment_fn) | 1481 | removed[fragment_fn] = updated[fragment_fn] |
| 1530 | # Remove fragment from local-files | 1482 | del updated[fragment_fn] |
| 1531 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
| 1532 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
| 1533 | 1483 | ||
| 1534 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1484 | # Special handling for cml1, ccmake, etc bbclasses that generated |
| 1535 | # configuration fragment files that are consumed as source files | 1485 | # configuration fragment files that are consumed as source files |
| @@ -1537,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
| 1537 | if bb.data.inherits_class(frag_class, rd): | 1487 | if bb.data.inherits_class(frag_class, rd): |
| 1538 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1488 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
| 1539 | if os.path.exists(srcpath): | 1489 | if os.path.exists(srcpath): |
| 1540 | if frag_name not in new_set: | 1490 | if frag_name in removed: |
| 1541 | new_set.append(frag_name) | 1491 | del removed[frag_name] |
| 1492 | if frag_name not in updated: | ||
| 1493 | added[frag_name] = {} | ||
| 1542 | # copy fragment into destdir | 1494 | # copy fragment into destdir |
| 1543 | shutil.copy2(srcpath, destdir) | 1495 | shutil.copy2(srcpath, destdir) |
| 1544 | # copy fragment into local files if exists | 1496 | |
| 1545 | if os.path.isdir(local_files_dir): | ||
| 1546 | shutil.copy2(srcpath, local_files_dir) | ||
| 1547 | |||
| 1548 | if new_set is not None: | ||
| 1549 | for fname in new_set: | ||
| 1550 | if fname in existing_files: | ||
| 1551 | origpath = existing_files.pop(fname) | ||
| 1552 | workpath = os.path.join(local_files_dir, fname) | ||
| 1553 | if not filecmp.cmp(origpath, workpath): | ||
| 1554 | updated[fname] = {'path' : origpath} | ||
| 1555 | elif fname != '.gitignore': | ||
| 1556 | added[fname] = {} | ||
| 1557 | |||
| 1558 | workdir = rd.getVar('WORKDIR') | ||
| 1559 | s = rd.getVar('S') | ||
| 1560 | if not s.endswith(os.sep): | ||
| 1561 | s += os.sep | ||
| 1562 | |||
| 1563 | if workdir != s: | ||
| 1564 | # Handle files where subdir= was specified | ||
| 1565 | for fname in list(existing_files.keys()): | ||
| 1566 | # FIXME handle both subdir starting with BP and not? | ||
| 1567 | fworkpath = os.path.join(workdir, fname) | ||
| 1568 | if fworkpath.startswith(s): | ||
| 1569 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
| 1570 | if os.path.exists(fpath): | ||
| 1571 | origpath = existing_files.pop(fname) | ||
| 1572 | if not filecmp.cmp(origpath, fpath): | ||
| 1573 | updated[fpath] = {'path' : origpath} | ||
| 1574 | |||
| 1575 | removed = existing_files | ||
| 1576 | return (updated, added, removed) | 1497 | return (updated, added, removed) |
| 1577 | 1498 | ||
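The rewritten _export_local_files reduces to a three-way split per path reported by git: unknown to SRC_URI means added, missing from the source tree means removed, otherwise updated. A compact sketch of that decision with invented file sets:

    def classify(modified, existing_files, exists_in_srctree):
        """Mirror the updated/added/removed split used above.

        modified:          paths reported by _git_modified()
        existing_files:    {basename: recipe-space path} from get_recipe_local_files()
        exists_in_srctree: predicate standing in for the os.path.exists() checks
        """
        updated, added, removed = {}, {}, {}
        for f in modified:
            if f not in existing_files:
                added[f] = {}
            elif not exists_in_srctree(f):
                removed[f] = existing_files[f]
            else:
                updated[f] = {'path': existing_files[f]}
        return updated, added, removed

    u, a, r = classify(
        ['defconfig', 'new.cfg', 'gone.patch'],
        {'defconfig': '/layer/recipe/defconfig', 'gone.patch': '/layer/recipe/gone.patch'},
        lambda f: f != 'gone.patch')
    assert list(u) == ['defconfig'] and list(a) == ['new.cfg'] and list(r) == ['gone.patch']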
| 1578 | 1499 | ||
| @@ -1590,7 +1511,7 @@ def _determine_files_dir(rd): | |||
| 1590 | 1511 | ||
| 1591 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): | 1512 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): |
| 1592 | """Implement the 'srcrev' mode of update-recipe""" | 1513 | """Implement the 'srcrev' mode of update-recipe""" |
| 1593 | import bb | 1514 | import bb.process |
| 1594 | import oe.recipeutils | 1515 | import oe.recipeutils |
| 1595 | 1516 | ||
| 1596 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 1517 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
| @@ -1628,6 +1549,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
| 1628 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1549 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
| 1629 | srctreebase = workspace[recipename]['srctreebase'] | 1550 | srctreebase = workspace[recipename]['srctreebase'] |
| 1630 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | 1551 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
| 1552 | removedentries = {} | ||
| 1631 | if not no_remove: | 1553 | if not no_remove: |
| 1632 | # Find list of existing patches in recipe file | 1554 | # Find list of existing patches in recipe file |
| 1633 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1555 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
| @@ -1691,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
| 1691 | 1613 | ||
| 1692 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): | 1614 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): |
| 1693 | """Implement the 'patch' mode of update-recipe""" | 1615 | """Implement the 'patch' mode of update-recipe""" |
| 1694 | import bb | ||
| 1695 | import oe.recipeutils | 1616 | import oe.recipeutils |
| 1696 | 1617 | ||
| 1697 | recipefile = rd.getVar('FILE') | 1618 | recipefile = rd.getVar('FILE') |
| @@ -1805,6 +1726,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
| 1805 | for basepath, param in upd_p.items(): | 1726 | for basepath, param in upd_p.items(): |
| 1806 | path = param['path'] | 1727 | path = param['path'] |
| 1807 | patchdir = param.get('patchdir', ".") | 1728 | patchdir = param.get('patchdir', ".") |
| 1729 | patchdir_param = {} | ||
| 1808 | if patchdir != "." : | 1730 | if patchdir != "." : |
| 1809 | patchdir_param = dict(patchdir_params) | 1731 | patchdir_param = dict(patchdir_params) |
| 1810 | if patchdir_param: | 1732 | if patchdir_param: |
| @@ -1870,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
| 1870 | 1792 | ||
| 1871 | def _guess_recipe_update_mode(srctree, rdata): | 1793 | def _guess_recipe_update_mode(srctree, rdata): |
| 1872 | """Guess the recipe update mode to use""" | 1794 | """Guess the recipe update mode to use""" |
| 1795 | import bb.process | ||
| 1873 | src_uri = (rdata.getVar('SRC_URI') or '').split() | 1796 | src_uri = (rdata.getVar('SRC_URI') or '').split() |
| 1874 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] | 1797 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] |
| 1875 | if not git_uris: | 1798 | if not git_uris: |
| @@ -1891,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata): | |||
| 1891 | return 'patch' | 1814 | return 'patch' |
| 1892 | 1815 | ||
| 1893 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): | 1816 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): |
| 1817 | import bb.data | ||
| 1818 | import bb.process | ||
| 1894 | srctree = workspace[recipename]['srctree'] | 1819 | srctree = workspace[recipename]['srctree'] |
| 1895 | if mode == 'auto': | 1820 | if mode == 'auto': |
| 1896 | mode = _guess_recipe_update_mode(srctree, rd) | 1821 | mode = _guess_recipe_update_mode(srctree, rd) |
| @@ -2013,6 +1938,7 @@ def status(args, config, basepath, workspace): | |||
| 2013 | 1938 | ||
| 2014 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | 1939 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): |
| 2015 | """Reset one or more recipes""" | 1940 | """Reset one or more recipes""" |
| 1941 | import bb.process | ||
| 2016 | import oe.path | 1942 | import oe.path |
| 2017 | 1943 | ||
| 2018 | def clean_preferred_provider(pn, layerconf_path): | 1944 | def clean_preferred_provider(pn, layerconf_path): |
| @@ -2025,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
| 2025 | lines = f.readlines() | 1951 | lines = f.readlines() |
| 2026 | with open(new_layerconf_file, 'a') as nf: | 1952 | with open(new_layerconf_file, 'a') as nf: |
| 2027 | for line in lines: | 1953 | for line in lines: |
| 2028 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' | 1954 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$' |
| 2029 | if not re.match(pprovider_exp, line): | 1955 | if not re.match(pprovider_exp, line): |
| 2030 | nf.write(line) | 1956 | nf.write(line) |
| 2031 | else: | 1957 | else: |
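The re.escape() above matters for provider names containing regex metacharacters such as gtk+ or libstdc++: unescaped, the '+' quantifies the preceding character instead of matching literally. A short demonstration:

    import re

    pn = 'gtk+'
    line = 'PREFERRED_PROVIDER_virtual/gtk = "gtk+"'

    unescaped = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'
    escaped = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'

    # As a raw pattern, 'gtk+' means 'gt' followed by one or more 'k'
    assert not re.match(unescaped, line)
    assert re.match(escaped, line)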
| @@ -2116,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
| 2116 | 2042 | ||
| 2117 | def reset(args, config, basepath, workspace): | 2043 | def reset(args, config, basepath, workspace): |
| 2118 | """Entry point for the devtool 'reset' subcommand""" | 2044 | """Entry point for the devtool 'reset' subcommand""" |
| 2119 | import bb | ||
| 2120 | import shutil | ||
| 2121 | 2045 | ||
| 2122 | recipes = "" | 2046 | recipes = "" |
| 2123 | 2047 | ||
| @@ -2396,6 +2320,7 @@ def register_commands(subparsers, context): | |||
| 2396 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') | 2320 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') |
| 2397 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') | 2321 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') |
| 2398 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") | 2322 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") |
| 2323 | parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe') | ||
| 2399 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) | 2324 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) |
| 2400 | 2325 | ||
| 2401 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', | 2326 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', |
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index fa5b8ef3c7..dda0a58098 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
| @@ -32,7 +32,7 @@ def _run(cmd, cwd=''): | |||
| 32 | 32 | ||
| 33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
| 34 | srctree = tmpdir | 34 | srctree = tmpdir |
| 35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
| 36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
| 37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
| 38 | else: | 38 | else: |
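_get_srctree() (and create_recipe() further down) now uses a plain os.listdir() where scriptutils.filter_src_subdirs() used to filter the unpack directory; presumably the dedicated UNPACKDIR layout adopted elsewhere in this series makes the filtering unnecessary. The surviving logic is just single-subdirectory detection, roughly:

    import os

    def guess_srctree(tmpdir):
        # If fetch/unpack produced exactly one top-level entry, descend
        # into it; otherwise the unpack directory is the source tree.
        entries = os.listdir(tmpdir)
        if len(entries) == 1 and os.path.isdir(os.path.join(tmpdir, entries[0])):
            return os.path.join(tmpdir, entries[0])
        return tmpdir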
| @@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
| 76 | bb.utils.rename(os.path.join(path, oldfile), | 76 | bb.utils.rename(os.path.join(path, oldfile), |
| 77 | os.path.join(path, newfile)) | 77 | os.path.join(path, newfile)) |
| 78 | 78 | ||
| 79 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path): |
| 80 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
| 81 | if oldrecipe.endswith('_%s.bb' % oldpv): | 81 | if oldrecipe.endswith('_%s.bb' % oldpv): |
| 82 | newrecipe = '%s_%s.bb' % (bpn, newpv) | 82 | newrecipe = '%s_%s.bb' % (pn, newpv) |
| 83 | if oldrecipe != newrecipe: | 83 | if oldrecipe != newrecipe: |
| 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) | 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) |
| 85 | else: | 85 | else: |
| 86 | newrecipe = oldrecipe | 86 | newrecipe = oldrecipe |
| 87 | return os.path.join(path, newrecipe) | 87 | return os.path.join(path, newrecipe) |
| 88 | 88 | ||
| 89 | def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | 89 | def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path): |
| 90 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
| 91 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path) |
| 92 | 92 | ||
| 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
| 94 | """Writes an append file""" | 94 | """Writes an append file""" |
| @@ -169,6 +169,7 @@ def _get_uri(rd): | |||
| 169 | 169 | ||
| 170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): | 170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): |
| 171 | """Extract sources of a recipe with a new version""" | 171 | """Extract sources of a recipe with a new version""" |
| 172 | import oe.patch | ||
| 172 | 173 | ||
| 173 | def __run(cmd): | 174 | def __run(cmd): |
| 174 | """Simple wrapper which calls _run with srctree as cwd""" | 175 | """Simple wrapper which calls _run with srctree as cwd""" |
| @@ -187,9 +188,9 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
| 187 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 188 | if uri.startswith('git://') or uri.startswith('gitsm://'): |
| 188 | __run('git fetch') | 189 | __run('git fetch') |
| 189 | __run('git checkout %s' % rev) | 190 | __run('git checkout %s' % rev) |
| 190 | __run('git tag -f devtool-base-new') | 191 | __run('git tag -f --no-sign devtool-base-new') |
| 191 | __run('git submodule update --recursive') | 192 | __run('git submodule update --recursive') |
| 192 | __run('git submodule foreach \'git tag -f devtool-base-new\'') | 193 | __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'') |
| 193 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') | 194 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') |
| 194 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] | 195 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] |
| 195 | checksums = {} | 196 | checksums = {} |
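Adding --no-sign to the internal devtool-base-* tags overrides a user-level tag.gpgSign = true git configuration, which would otherwise turn these bookkeeping tags into signing prompts (or hard failures in non-interactive runs). A self-contained sketch of the pattern:

    import subprocess

    def tag_force_unsigned(srctree, tagname):
        # -f: move the tag if it already exists; --no-sign: never invoke
        # GPG, regardless of the user's tag.gpgSign setting.
        subprocess.run(['git', 'tag', '-f', '--no-sign', tagname],
                       cwd=srctree, check=True)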
| @@ -256,7 +257,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
| 256 | useroptions = [] | 257 | useroptions = [] |
| 257 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | 258 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) |
| 258 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) | 259 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) |
| 259 | __run('git tag -f devtool-base-%s' % newpv) | 260 | __run('git tag -f --no-sign devtool-base-%s' % newpv) |
| 260 | 261 | ||
| 261 | revs = {} | 262 | revs = {} |
| 262 | for path in paths: | 263 | for path in paths: |
| @@ -279,7 +280,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
| 279 | # note is still attached to the old commit. Avoid this by making | 280 | # note is still attached to the old commit. Avoid this by making |
| 280 | # sure all old devtool related commits have a note attached to them | 281 | # sure all old devtool related commits have a note attached to them |
| 281 | # (this assumes git config notes.rewriteMode is set to ignore). | 282 | # (this assumes git config notes.rewriteMode is set to ignore). |
| 282 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) | 283 | (stdout, _) = _run('git rev-list devtool-base..%s' % target_branch, cwd=path) |
| 283 | for rev in stdout.splitlines(): | 284 | for rev in stdout.splitlines(): |
| 284 | if not oe.patch.GitApplyTree.getNotes(path, rev): | 285 | if not oe.patch.GitApplyTree.getNotes(path, rev): |
| 285 | oe.patch.GitApplyTree.addNote(path, rev, "dummy") | 286 | oe.patch.GitApplyTree.addNote(path, rev, "dummy") |
| @@ -297,7 +298,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
| 297 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | 298 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) |
| 298 | 299 | ||
| 299 | # Remove any dummy notes added above. | 300 | # Remove any dummy notes added above. |
| 300 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) | 301 | (stdout, _) = _run('git rev-list devtool-base..%s' % target_branch, cwd=path) |
| 301 | for rev in stdout.splitlines(): | 302 | for rev in stdout.splitlines(): |
| 302 | oe.patch.GitApplyTree.removeNote(path, rev, "dummy") | 303 | oe.patch.GitApplyTree.removeNote(path, rev, "dummy") |
| 303 | 304 | ||
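The two __run → _run(..., cwd=path) changes above fix a real bug: the surrounding loop iterates over paths, which holds the main source tree plus every submodule checkout, but __run always executed in srctree, so note bookkeeping silently missed submodule commits. Generic form of the corrected pattern:

    import subprocess

    def revs_between(path, base, branch):
        # Run rev-list inside each checkout (main tree or submodule),
        # not always inside the top-level srctree.
        out = subprocess.run(['git', 'rev-list', '%s..%s' % (base, branch)],
                             cwd=path, capture_output=True, text=True,
                             check=True)
        return out.stdout.splitlines()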
| @@ -335,19 +336,19 @@ def _add_license_diff_to_recipe(path, diff): | |||
| 335 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 336 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
| 336 | """Creates the new recipe under workspace""" | 337 | """Creates the new recipe under workspace""" |
| 337 | 338 | ||
| 338 | bpn = rd.getVar('BPN') | 339 | pn = rd.getVar('PN') |
| 339 | path = os.path.join(workspace, 'recipes', bpn) | 340 | path = os.path.join(workspace, 'recipes', pn) |
| 340 | bb.utils.mkdirhier(path) | 341 | bb.utils.mkdirhier(path) |
| 341 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) | 342 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) |
| 342 | if not copied: | 343 | if not copied: |
| 343 | raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) | 344 | raise DevtoolError('Internal error - no files were copied for recipe %s' % pn) |
| 344 | logger.debug('Copied %s to %s' % (copied, path)) | 345 | logger.debug('Copied %s to %s' % (copied, path)) |
| 345 | 346 | ||
| 346 | oldpv = rd.getVar('PV') | 347 | oldpv = rd.getVar('PV') |
| 347 | if not newpv: | 348 | if not newpv: |
| 348 | newpv = oldpv | 349 | newpv = oldpv |
| 349 | origpath = rd.getVar('FILE') | 350 | origpath = rd.getVar('FILE') |
| 350 | fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) | 351 | fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path) |
| 351 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) | 352 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) |
| 352 | 353 | ||
| 353 | newvalues = {} | 354 | newvalues = {} |
| @@ -534,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses): | |||
| 534 | diff = diff + line | 535 | diff = diff + line |
| 535 | return diff | 536 | return diff |
| 536 | 537 | ||
| 538 | def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil): | ||
| 539 | tasks = [] | ||
| 540 | for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split(): | ||
| 541 | logger.info('Running extra recipe upgrade task: %s' % task) | ||
| 542 | res = tinfoil.build_targets(pn, task, handle_events=True) | ||
| 543 | |||
| 544 | if not res: | ||
| 545 | raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn)) | ||
| 546 | |||
| 537 | def upgrade(args, config, basepath, workspace): | 547 | def upgrade(args, config, basepath, workspace): |
| 538 | """Entry point for the devtool 'upgrade' subcommand""" | 548 | """Entry point for the devtool 'upgrade' subcommand""" |
| 539 | 549 | ||
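_run_recipe_upgrade_extra_tasks() introduces a hook: every task named in RECIPE_UPGRADE_EXTRA_TASKS is built via tinfoil after the upgraded recipe is written, so recipes can regenerate derived metadata automatically (a recipe or class would opt in with something like RECIPE_UPGRADE_EXTRA_TASKS += "do_update_crates" — the task name here is illustrative). A condensed sketch of the same flow:

    def run_extra_tasks(pn, rd, tinfoil):
        # Mirrors _run_recipe_upgrade_extra_tasks above; a failing task
        # aborts the upgrade with an error naming it.
        for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
            if not tinfoil.build_targets(pn, task, handle_events=True):
                raise RuntimeError('task %s failed for %s' % (task, pn))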
| @@ -561,7 +571,7 @@ def upgrade(args, config, basepath, workspace): | |||
| 561 | else: | 571 | else: |
| 562 | srctree = standard.get_default_srctree(config, pn) | 572 | srctree = standard.get_default_srctree(config, pn) |
| 563 | 573 | ||
| 564 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
| 565 | 575 | ||
| 566 | # try to automatically discover latest version and revision if not provided on command line | 576 | # try to automatically discover latest version and revision if not provided on command line |
| 567 | if not args.version and not args.srcrev: | 577 | if not args.version and not args.srcrev: |
| @@ -601,7 +611,7 @@ def upgrade(args, config, basepath, workspace): | |||
| 601 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 611 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
| 602 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 612 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
| 603 | except (bb.process.CmdError, DevtoolError) as e: | 613 | except (bb.process.CmdError, DevtoolError) as e: |
| 604 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) | 614 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN')) |
| 605 | _upgrade_error(e, recipedir, srctree, args.keep_failure) | 615 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
| 606 | standard._add_md5(config, pn, os.path.dirname(rf)) | 616 | standard._add_md5(config, pn, os.path.dirname(rf)) |
| 607 | 617 | ||
| @@ -609,6 +619,8 @@ def upgrade(args, config, basepath, workspace): | |||
| 609 | copied, config.workspace_path, rd) | 619 | copied, config.workspace_path, rd) |
| 610 | standard._add_md5(config, pn, af) | 620 | standard._add_md5(config, pn, af) |
| 611 | 621 | ||
| 622 | _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil) | ||
| 623 | |||
| 612 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 624 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
| 613 | 625 | ||
| 614 | logger.info('Upgraded source extracted to %s' % srctree) | 626 | logger.info('Upgraded source extracted to %s' % srctree) |
| @@ -643,18 +655,28 @@ def latest_version(args, config, basepath, workspace): | |||
| 643 | return 0 | 655 | return 0 |
| 644 | 656 | ||
| 645 | def check_upgrade_status(args, config, basepath, workspace): | 657 | def check_upgrade_status(args, config, basepath, workspace): |
| 658 | def _print_status(recipe): | ||
| 659 | print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'], | ||
| 660 | recipe['cur_ver'], | ||
| 661 | recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"), | ||
| 662 | recipe['maintainer'], | ||
| 663 | recipe['revision'] if recipe['revision'] != 'N/A' else "", | ||
| 664 | "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else "")) | ||
| 646 | if not args.recipe: | 665 | if not args.recipe: |
| 647 | logger.info("Checking the upstream status for all recipes may take a few minutes") | 666 | logger.info("Checking the upstream status for all recipes may take a few minutes") |
| 648 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) | 667 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) |
| 649 | for result in results: | 668 | for recipegroup in results: |
| 650 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 669 | upgrades = [r for r in recipegroup if r['status'] != 'MATCH'] |
| 651 | if args.all or result[1] != 'MATCH': | 670 | currents = [r for r in recipegroup if r['status'] == 'MATCH'] |
| 652 | print("{:25} {:15} {:15} {} {} {}".format( result[0], | 671 | if len(upgrades) > 1: |
| 653 | result[2], | 672 | print("These recipes need to be upgraded together {") |
| 654 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 673 | for r in sorted(upgrades, key=lambda r:r['pn']): |
| 655 | result[4], | 674 | _print_status(r) |
| 656 | result[5] if result[5] != 'N/A' else "", | 675 | if len(upgrades) > 1: |
| 657 | "cannot be updated due to: %s" %(result[6]) if result[6] else "")) | 676 | print("}") |
| 677 | for r in currents: | ||
| 678 | if args.all: | ||
| 679 | _print_status(r) | ||
| 658 | 680 | ||
| 659 | def register_commands(subparsers, context): | 681 | def register_commands(subparsers, context): |
| 660 | """Register devtool subcommands from this plugin""" | 682 | """Register devtool subcommands from this plugin""" |
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py index 964817766b..bf39f71b11 100644 --- a/scripts/lib/devtool/utilcmds.py +++ b/scripts/lib/devtool/utilcmds.py | |||
| @@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace): | |||
| 64 | b = rd.getVar('B') | 64 | b = rd.getVar('B') |
| 65 | s = rd.getVar('S') | 65 | s = rd.getVar('S') |
| 66 | configurescript = os.path.join(s, 'configure') | 66 | configurescript = os.path.join(s, 'configure') |
| 67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) | 67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd)) |
| 68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') | 68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') |
| 69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') | 69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') |
| 70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') | 70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') |
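The utilcmds.py change swaps the private __BBTASKS datastore variable for bb.build.listtasks(), the public accessor for a recipe's task list. Sketch of the resulting check (the None-guard on the flags is an addition for self-containment):

    import bb.build

    def configure_disabled(rd):
        # do_configure is effectively disabled if it is flagged noexec
        # or not registered as a task at all.
        flags = rd.getVarFlags('do_configure') or {}
        return 'noexec' in flags or 'do_configure' not in bb.build.listtasks(rd)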
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py index 10945d6008..041d79f162 100644 --- a/scripts/lib/recipetool/append.py +++ b/scripts/lib/recipetool/append.py | |||
| @@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None): | |||
| 317 | import oe.recipeutils | 317 | import oe.recipeutils |
| 318 | 318 | ||
| 319 | srcdir = rd.getVar('S') | 319 | srcdir = rd.getVar('S') |
| 320 | workdir = rd.getVar('WORKDIR') | 320 | unpackdir = rd.getVar('UNPACKDIR') |
| 321 | 321 | ||
| 322 | import bb.fetch | 322 | import bb.fetch |
| 323 | simplified = {} | 323 | simplified = {} |
| @@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None): | |||
| 336 | src_destdir = os.path.dirname(srcfile) | 336 | src_destdir = os.path.dirname(srcfile) |
| 337 | if not args.use_workdir: | 337 | if not args.use_workdir: |
| 338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): | 338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): |
| 339 | srcdir = os.path.join(workdir, 'git') | 339 | srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX')) |
| 340 | if not bb.data.inherits_class('kernel-yocto', rd): | 340 | if not bb.data.inherits_class('kernel-yocto', rd): |
| 341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') | 341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}') |
| 342 | src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) | 342 | src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir) |
| 343 | src_destdir = os.path.normpath(src_destdir) | 343 | src_destdir = os.path.normpath(src_destdir) |
| 344 | 344 | ||
| 345 | if src_destdir and src_destdir != '.': | 345 | if src_destdir and src_destdir != '.': |
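appendsrc() follows the same WORKDIR → UNPACKDIR migration: sources are no longer assumed to land in ${WORKDIR}/git, and the git checkout directory name comes from BB_GIT_DEFAULT_DESTSUFFIX (historically the literal 'git'). Sketch of the derived kernel source path, given a datastore rd:

    import os

    def kernel_srcdir(rd):
        # ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX} replaces the
        # hard-coded ${WORKDIR}/git location.
        return os.path.join(rd.getVar('UNPACKDIR'),
                            rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))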
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 8e9ff38db6..ef0ba974a9 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
| @@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit | |||
| 18 | import hashlib | 18 | import hashlib |
| 19 | import bb.fetch2 | 19 | import bb.fetch2 |
| 20 | logger = logging.getLogger('recipetool') | 20 | logger = logging.getLogger('recipetool') |
| 21 | from oe.license import tidy_licenses | ||
| 22 | from oe.license_finder import find_licenses | ||
| 21 | 23 | ||
| 22 | tinfoil = None | 24 | tinfoil = None |
| 23 | plugins = None | 25 | plugins = None |
| @@ -528,7 +530,7 @@ def create_recipe(args): | |||
| 528 | if ftmpdir and args.keep_temp: | 530 | if ftmpdir and args.keep_temp: |
| 529 | logger.info('Fetch temp directory is %s' % ftmpdir) | 531 | logger.info('Fetch temp directory is %s' % ftmpdir) |
| 530 | 532 | ||
| 531 | dirlist = scriptutils.filter_src_subdirs(srctree) | 533 | dirlist = os.listdir(srctree) |
| 532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | 534 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) |
| 533 | if len(dirlist) == 1: | 535 | if len(dirlist) == 1: |
| 534 | singleitem = os.path.join(srctree, dirlist[0]) | 536 | singleitem = os.path.join(srctree, dirlist[0]) |
| @@ -637,7 +639,6 @@ def create_recipe(args): | |||
| 637 | if len(splitline) > 1: | 639 | if len(splitline) > 1: |
| 638 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | 640 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): |
| 639 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' | 641 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' |
| 640 | srcsubdir = 'git' | ||
| 641 | break | 642 | break |
| 642 | 643 | ||
| 643 | if args.src_subdir: | 644 | if args.src_subdir: |
| @@ -735,7 +736,7 @@ def create_recipe(args): | |||
| 735 | if srcsubdir and not args.binary: | 736 | if srcsubdir and not args.binary: |
| 736 | # (for binary packages we explicitly specify subdir= when fetching to | 737 | # (for binary packages we explicitly specify subdir= when fetching to |
| 737 | # match the default value of S, so we don't need to set it in that case) | 738 | # match the default value of S, so we don't need to set it in that case) |
| 738 | lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) | 739 | lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir) |
| 739 | lines_before.append('') | 740 | lines_before.append('') |
| 740 | 741 | ||
| 741 | if pkgarch: | 742 | if pkgarch: |
| @@ -764,6 +765,7 @@ def create_recipe(args): | |||
| 764 | extrafiles = extravalues.pop('extrafiles', {}) | 765 | extrafiles = extravalues.pop('extrafiles', {}) |
| 765 | extra_pn = extravalues.pop('PN', None) | 766 | extra_pn = extravalues.pop('PN', None) |
| 766 | extra_pv = extravalues.pop('PV', None) | 767 | extra_pv = extravalues.pop('PV', None) |
| 768 | run_tasks = extravalues.pop('run_tasks', "").split() | ||
| 767 | 769 | ||
| 768 | if extra_pv and not realpv: | 770 | if extra_pv and not realpv: |
| 769 | realpv = extra_pv | 771 | realpv = extra_pv |
| @@ -824,7 +826,8 @@ def create_recipe(args): | |||
| 824 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | 826 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) |
| 825 | bb.utils.mkdirhier(extraoutdir) | 827 | bb.utils.mkdirhier(extraoutdir) |
| 826 | for destfn, extrafile in extrafiles.items(): | 828 | for destfn, extrafile in extrafiles.items(): |
| 827 | shutil.move(extrafile, os.path.join(extraoutdir, destfn)) | 829 | fn = destfn.format(pn=pn, pv=realpv) |
| 830 | shutil.move(extrafile, os.path.join(extraoutdir, fn)) | ||
| 828 | 831 | ||
| 829 | lines = lines_before | 832 | lines = lines_before |
| 830 | lines_before = [] | 833 | lines_before = [] |
| @@ -839,7 +842,7 @@ def create_recipe(args): | |||
| 839 | line = line.replace(realpv, '${PV}') | 842 | line = line.replace(realpv, '${PV}') |
| 840 | if pn: | 843 | if pn: |
| 841 | line = line.replace(pn, '${BPN}') | 844 | line = line.replace(pn, '${BPN}') |
| 842 | if line == 'S = "${WORKDIR}/${BPN}-${PV}"': | 845 | if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line: |
| 843 | skipblank = True | 846 | skipblank = True |
| 844 | continue | 847 | continue |
| 845 | elif line.startswith('SRC_URI = '): | 848 | elif line.startswith('SRC_URI = '): |
| @@ -917,6 +920,10 @@ def create_recipe(args): | |||
| 917 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | 920 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) |
| 918 | tinfoil.modified_files() | 921 | tinfoil.modified_files() |
| 919 | 922 | ||
| 923 | for task in run_tasks: | ||
| 924 | logger.info("Running task %s" % task) | ||
| 925 | tinfoil.build_file_sync(outfile, task) | ||
| 926 | |||
| 920 | if tempsrc: | 927 | if tempsrc: |
| 921 | if args.keep_temp: | 928 | if args.keep_temp: |
| 922 | logger.info('Preserving temporary directory %s' % tempsrc) | 929 | logger.info('Preserving temporary directory %s' % tempsrc) |
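The run_tasks loop lets a recipe handler request tasks to be executed synchronously against the freshly written recipe via tinfoil.build_file_sync(). On the producing side, a handler would set the value through extravalues; a hedged sketch (handler and task name illustrative):

    from recipetool.create import RecipeHandler

    class SomeHandler(RecipeHandler):
        def process(self, srctree, classes, lines_before, lines_after,
                    handled, extravalues):
            # Ask create_recipe() to run this task once the recipe
            # file exists on disk.
            extravalues['run_tasks'] = 'do_update_crates'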
| @@ -944,23 +951,13 @@ def fixup_license(value): | |||
| 944 | return '(' + value + ')' | 951 | return '(' + value + ')' |
| 945 | return value | 952 | return value |
| 946 | 953 | ||
| 947 | def tidy_licenses(value): | ||
| 948 | """Flat, split and sort licenses""" | ||
| 949 | from oe.license import flattened_licenses | ||
| 950 | def _choose(a, b): | ||
| 951 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
| 952 | return ["(%s | %s)" % (str_a, str_b)] | ||
| 953 | if not isinstance(value, str): | ||
| 954 | value = " & ".join(value) | ||
| 955 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
| 956 | |||
| 957 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): | 954 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): |
| 958 | lichandled = [x for x in handled if x[0] == 'license'] | 955 | lichandled = [x for x in handled if x[0] == 'license'] |
| 959 | if lichandled: | 956 | if lichandled: |
| 960 | # Someone else has already handled the license vars, just return their value | 957 | # Someone else has already handled the license vars, just return their value |
| 961 | return lichandled[0][1] | 958 | return lichandled[0][1] |
| 962 | 959 | ||
| 963 | licvalues = guess_license(srctree, d) | 960 | licvalues = find_licenses(srctree, d) |
| 964 | licenses = [] | 961 | licenses = [] |
| 965 | lic_files_chksum = [] | 962 | lic_files_chksum = [] |
| 966 | lic_unknown = [] | 963 | lic_unknown = [] |
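License scanning has moved out of this file: handle_license_vars() now calls oe.license_finder.find_licenses() instead of the local guess_license() (removed below), and tidy_licenses() is imported from oe.license rather than defined here. Per the removed implementation, tidy_licenses() flattens, de-duplicates and case-insensitively sorts a license expression:

    from oe.license import tidy_licenses

    print(tidy_licenses('MIT & BSD-3-Clause & MIT'))
    # expected: ['BSD-3-Clause', 'MIT']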
| @@ -1040,222 +1037,9 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
| 1040 | handled.append(('license', licvalues)) | 1037 | handled.append(('license', licvalues)) |
| 1041 | return licvalues | 1038 | return licvalues |
| 1042 | 1039 | ||
| 1043 | def get_license_md5sums(d, static_only=False, linenumbers=False): | ||
| 1044 | import bb.utils | ||
| 1045 | import csv | ||
| 1046 | md5sums = {} | ||
| 1047 | if not static_only and not linenumbers: | ||
| 1048 | # Gather md5sums of license files in common license dir | ||
| 1049 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
| 1050 | for fn in os.listdir(commonlicdir): | ||
| 1051 | md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) | ||
| 1052 | md5sums[md5value] = fn | ||
| 1053 | |||
| 1054 | # The following were extracted from common values in various recipes | ||
| 1055 | # (double checking the license against the license file itself, not just | ||
| 1056 | # the LICENSE value in the recipe) | ||
| 1057 | |||
| 1058 | # Read license md5sums from csv file | ||
| 1059 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | ||
| 1060 | for path in (d.getVar('BBPATH').split(':') | ||
| 1061 | + [os.path.join(scripts_path, '..', '..')]): | ||
| 1062 | csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv') | ||
| 1063 | if os.path.isfile(csv_path): | ||
| 1064 | with open(csv_path, newline='') as csv_file: | ||
| 1065 | fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5'] | ||
| 1066 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames) | ||
| 1067 | for row in reader: | ||
| 1068 | if linenumbers: | ||
| 1069 | md5sums[row['md5sum']] = ( | ||
| 1070 | row['license'], row['beginline'], row['endline'], row['md5']) | ||
| 1071 | else: | ||
| 1072 | md5sums[row['md5sum']] = row['license'] | ||
| 1073 | |||
| 1074 | return md5sums | ||
| 1075 | |||
| 1076 | def crunch_known_licenses(d): | ||
| 1077 | ''' | ||
| 1078 | Calculate the MD5 checksums for the crunched versions of all common | ||
| 1079 | licenses. Also add additional known checksums. | ||
| 1080 | ''' | ||
| 1081 | |||
| 1082 | crunched_md5sums = {} | ||
| 1083 | |||
| 1084 | # common licenses | ||
| 1085 | crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only' | ||
| 1086 | crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only' | ||
| 1087 | crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only' | ||
| 1088 | |||
| 1089 | # The following two were gleaned from the "forever" npm package | ||
| 1090 | crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' | ||
| 1091 | # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt | ||
| 1092 | crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' | ||
| 1093 | # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE | ||
| 1094 | crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only' | ||
| 1095 | # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt | ||
| 1096 | crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only' | ||
| 1097 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 | ||
| 1098 | crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only' | ||
| 1099 | # unixODBC-2.3.4 COPYING | ||
| 1100 | crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only' | ||
| 1101 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 | ||
| 1102 | crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only' | ||
| 1103 | # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 | ||
| 1104 | crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' | ||
| 1105 | |||
| 1106 | # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD | ||
| 1107 | crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause' | ||
| 1108 | # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE | ||
| 1109 | crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause' | ||
| 1110 | # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE | ||
| 1111 | crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause' | ||
| 1112 | # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE | ||
| 1113 | crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause' | ||
| 1114 | # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE | ||
| 1115 | crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause' | ||
| 1116 | # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE | ||
| 1117 | crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause' | ||
| 1118 | # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE | ||
| 1119 | crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause' | ||
| 1120 | # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE | ||
| 1121 | crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause' | ||
| 1122 | # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE | ||
| 1123 | crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause' | ||
| 1124 | # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE | ||
| 1125 | crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT' | ||
| 1126 | # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE | ||
| 1127 | crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT' | ||
| 1128 | # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE | ||
| 1129 | crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0' | ||
| 1130 | # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md | ||
| 1131 | crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0' | ||
| 1132 | # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE | ||
| 1133 | crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0' | ||
| 1134 | # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt | ||
| 1135 | crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0' | ||
| 1136 | # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE | ||
| 1137 | crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0' | ||
| 1138 | # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE | ||
| 1139 | crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense' | ||
| 1140 | # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md | ||
| 1141 | crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib' | ||
| 1142 | |||
| 1143 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
| 1144 | for fn in sorted(os.listdir(commonlicdir)): | ||
| 1145 | md5value, lictext = crunch_license(os.path.join(commonlicdir, fn)) | ||
| 1146 | if md5value not in crunched_md5sums: | ||
| 1147 | crunched_md5sums[md5value] = fn | ||
| 1148 | elif fn != crunched_md5sums[md5value]: | ||
| 1149 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn)) | ||
| 1150 | else: | ||
| 1151 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value])) | ||
| 1152 | |||
| 1153 | return crunched_md5sums | ||
| 1154 | |||
| 1155 | def crunch_license(licfile): | ||
| 1156 | ''' | ||
| 1157 | Remove non-material text from a license file and then calculate its | ||
| 1158 | md5sum. This works well for licenses that contain a copyright statement, | ||
| 1159 | but is also a useful way to handle people's insistence upon reformatting | ||
| 1160 | the license text slightly (with no material difference to the text of the | ||
| 1161 | license). | ||
| 1162 | ''' | ||
| 1163 | |||
| 1164 | import oe.utils | ||
| 1165 | |||
| 1166 | # Note: these are carefully constructed! | ||
| 1167 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
| 1168 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
| 1169 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
| 1170 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
| 1171 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
| 1172 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
| 1173 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
| 1174 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
| 1175 | |||
| 1176 | lictext = [] | ||
| 1177 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
| 1178 | for line in f: | ||
| 1179 | # Drop opening statements | ||
| 1180 | if copyright_re.match(line): | ||
| 1181 | continue | ||
| 1182 | elif disclaimer_re.match(line): | ||
| 1183 | continue | ||
| 1184 | elif email_re.match(line): | ||
| 1185 | continue | ||
| 1186 | elif header_re.match(line): | ||
| 1187 | continue | ||
| 1188 | elif tag_re.match(line): | ||
| 1189 | continue | ||
| 1190 | elif url_re.match(line): | ||
| 1191 | continue | ||
| 1192 | elif license_title_re.match(line): | ||
| 1193 | continue | ||
| 1194 | elif license_statement_re.match(line): | ||
| 1195 | continue | ||
| 1196 | # Strip comment symbols | ||
| 1197 | line = line.replace('*', '') \ | ||
| 1198 | .replace('#', '') | ||
| 1199 | # Unify spelling | ||
| 1200 | line = line.replace('sub-license', 'sublicense') | ||
| 1201 | # Squash spaces | ||
| 1202 | line = oe.utils.squashspaces(line.strip()) | ||
| 1203 | # Replace smart quotes, double quotes and backticks with single quotes | ||
| 1204 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
| 1205 | # Unify brackets | ||
| 1206 | line = line.replace("{", "[").replace("}", "]") | ||
| 1207 | if line: | ||
| 1208 | lictext.append(line) | ||
| 1209 | |||
| 1210 | m = hashlib.md5() | ||
| 1211 | try: | ||
| 1212 | m.update(' '.join(lictext).encode('utf-8')) | ||
| 1213 | md5val = m.hexdigest() | ||
| 1214 | except UnicodeEncodeError: | ||
| 1215 | md5val = None | ||
| 1216 | lictext = '' | ||
| 1217 | return md5val, lictext | ||
| 1218 | |||
| 1219 | def guess_license(srctree, d): | ||
| 1220 | import bb | ||
| 1221 | md5sums = get_license_md5sums(d) | ||
| 1222 | |||
| 1223 | crunched_md5sums = crunch_known_licenses(d) | ||
| 1224 | |||
| 1225 | licenses = [] | ||
| 1226 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
| 1227 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go") | ||
| 1228 | licfiles = [] | ||
| 1229 | for root, dirs, files in os.walk(srctree): | ||
| 1230 | for fn in files: | ||
| 1231 | if fn.endswith(skip_extensions): | ||
| 1232 | continue | ||
| 1233 | for spec in licspecs: | ||
| 1234 | if fnmatch.fnmatch(fn, spec): | ||
| 1235 | fullpath = os.path.join(root, fn) | ||
| 1236 | if not fullpath in licfiles: | ||
| 1237 | licfiles.append(fullpath) | ||
| 1238 | for licfile in sorted(licfiles): | ||
| 1239 | md5value = bb.utils.md5_file(licfile) | ||
| 1240 | license = md5sums.get(md5value, None) | ||
| 1241 | if not license: | ||
| 1242 | crunched_md5, lictext = crunch_license(licfile) | ||
| 1243 | license = crunched_md5sums.get(crunched_md5, None) | ||
| 1244 | if lictext and not license: | ||
| 1245 | license = 'Unknown' | ||
| 1246 | logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \ | ||
| 1247 | "and replace `Unknown` with the license:\n" \ | ||
| 1248 | "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value)) | ||
| 1249 | if license: | ||
| 1250 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
| 1251 | |||
| 1252 | # FIXME should we grab at least one source file with a license header and add that too? | ||
| 1253 | |||
| 1254 | return licenses | ||
| 1255 | |||
| 1256 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): | 1040 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): |
| 1257 | """ | 1041 | """ |
| 1258 | Given a list of (license, path, md5sum) as returned by guess_license(), | 1042 | Given a list of (license, path, md5sum) as returned by match_licenses(), |
| 1259 | a dict of package name to path mappings, write out a set of | 1043 | a dict of package name to path mappings, write out a set of |
| 1260 | package-specific LICENSE values. | 1044 | package-specific LICENSE values. |
| 1261 | """ | 1045 | """ |
| @@ -1284,6 +1068,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn | |||
| 1284 | outlicenses[pkgname] = licenses | 1068 | outlicenses[pkgname] = licenses |
| 1285 | return outlicenses | 1069 | return outlicenses |
| 1286 | 1070 | ||
| 1071 | def generate_common_licenses_chksums(common_licenses, d): | ||
| 1072 | lic_files_chksums = [] | ||
| 1073 | for license in tidy_licenses(common_licenses): | ||
| 1074 | licfile = '${COMMON_LICENSE_DIR}/' + license | ||
| 1075 | md5value = bb.utils.md5_file(d.expand(licfile)) | ||
| 1076 | lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value)) | ||
| 1077 | return lic_files_chksums | ||
| 1078 | |||
| 1287 | def read_pkgconfig_provides(d): | 1079 | def read_pkgconfig_provides(d): |
| 1288 | pkgdatadir = d.getVar('PKGDATA_DIR') | 1080 | pkgdatadir = d.getVar('PKGDATA_DIR') |
| 1289 | pkgmap = {} | 1081 | pkgmap = {} |
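The new generate_common_licenses_chksums() builds LIC_FILES_CHKSUM entries that point at ${COMMON_LICENSE_DIR} rather than at files in the source tree. A hedged usage sketch, given a datastore d (md5 values elided):

    chksums = generate_common_licenses_chksums(['MIT', 'Apache-2.0'], d)
    # -> ['file://${COMMON_LICENSE_DIR}/Apache-2.0;md5=...',
    #     'file://${COMMON_LICENSE_DIR}/MIT;md5=...']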
| @@ -1418,4 +1210,3 @@ def register_commands(subparsers): | |||
| 1418 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) | 1210 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) |
| 1419 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') | 1211 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') |
| 1420 | parser_create.set_defaults(func=create_recipe) | 1212 | parser_create.set_defaults(func=create_recipe) |
| 1421 | |||
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py index a85a2f2786..1b2e5a03d5 100644 --- a/scripts/lib/recipetool/create_go.py +++ b/scripts/lib/recipetool/create_go.py | |||
| @@ -10,13 +10,7 @@ | |||
| 10 | # | 10 | # |
| 11 | 11 | ||
| 12 | 12 | ||
| 13 | from collections import namedtuple | ||
| 14 | from enum import Enum | ||
| 15 | from html.parser import HTMLParser | ||
| 16 | from recipetool.create import RecipeHandler, handle_license_vars | 13 | from recipetool.create import RecipeHandler, handle_license_vars |
| 17 | from recipetool.create import guess_license, tidy_licenses, fixup_license | ||
| 18 | from recipetool.create import determine_from_url | ||
| 19 | from urllib.error import URLError, HTTPError | ||
| 20 | 14 | ||
| 21 | import bb.utils | 15 | import bb.utils |
| 22 | import json | 16 | import json |
| @@ -25,27 +19,13 @@ import os | |||
| 25 | import re | 19 | import re |
| 26 | import subprocess | 20 | import subprocess |
| 27 | import sys | 21 | import sys |
| 28 | import shutil | ||
| 29 | import tempfile | 22 | import tempfile |
| 30 | import urllib.parse | ||
| 31 | import urllib.request | ||
| 32 | 23 | ||
| 33 | 24 | ||
| 34 | GoImport = namedtuple('GoImport', 'root vcs url suffix') | ||
| 35 | logger = logging.getLogger('recipetool') | 25 | logger = logging.getLogger('recipetool') |
| 36 | CodeRepo = namedtuple( | ||
| 37 | 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor') | ||
| 38 | 26 | ||
| 39 | tinfoil = None | 27 | tinfoil = None |
| 40 | 28 | ||
| 41 | # Regular expression to parse pseudo semantic version | ||
| 42 | # see https://go.dev/ref/mod#pseudo-versions | ||
| 43 | re_pseudo_semver = re.compile( | ||
| 44 | r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") | ||
| 45 | # Regular expression to parse semantic version | ||
| 46 | re_semver = re.compile( | ||
| 47 | r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") | ||
| 48 | |||
| 49 | 29 | ||
| 50 | def tinfoil_init(instance): | 30 | def tinfoil_init(instance): |
| 51 | global tinfoil | 31 | global tinfoil |
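The remainder of this file's diff removes the hand-rolled Go module resolution machinery: the static per-host VCS table, the go-get meta-tag discovery, the proxy.golang.org lookup, and the semver/pseudo-version-to-commit resolution. What the deleted proxy lookup did, in miniature (a sketch based on the removed code below):

    import json
    import urllib.request

    def proxy_origin(modulepath, version):
        # The proxy dislikes mixed case; 'Origin', when present, carries
        # the repository URL, VCS type, commit hash and optional subdir.
        url = ('https://proxy.golang.org/%s/@v/%s.info'
               % (modulepath, version)).lower()
        with urllib.request.urlopen(url) as resp:
            return json.loads(resp.read().decode('utf-8')).get('Origin')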
| @@ -83,578 +63,6 @@ class GoRecipeHandler(RecipeHandler): | |||
| 83 | 63 | ||
| 84 | return bindir | 64 | return bindir |
| 85 | 65 | ||
| 86 | def __resolve_repository_static(self, modulepath): | ||
| 87 | """Resolve the repository in a static manner | ||
| 88 | |||
| 89 | The method is based on the go implementation of | ||
| 90 | `repoRootFromVCSPaths` in | ||
| 91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
| 92 | """ | ||
| 93 | |||
| 94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
| 95 | req = urllib.request.Request(url.geturl()) | ||
| 96 | |||
| 97 | try: | ||
| 98 | resp = urllib.request.urlopen(req) | ||
| 99 | # Some modulepath are just redirects to github (or some other vcs | ||
| 100 | # hoster). Therefore, we check if this modulepath redirects to | ||
| 101 | # somewhere else | ||
| 102 | if resp.geturl() != url.geturl(): | ||
| 103 | bb.debug(1, "%s is redirected to %s" % | ||
| 104 | (url.geturl(), resp.geturl())) | ||
| 105 | url = urllib.parse.urlparse(resp.geturl()) | ||
| 106 | modulepath = url.netloc + url.path | ||
| 107 | |||
| 108 | except URLError as url_err: | ||
| 109 | # This is probably because the module path | ||
| 110 | # contains the subdir and major path. Thus, | ||
| 111 | # we ignore this error for now | ||
| 112 | logger.debug( | ||
| 113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
| 114 | |||
| 115 | host, _, _ = modulepath.partition('/') | ||
| 116 | |||
| 117 | class vcs(Enum): | ||
| 118 | pathprefix = "pathprefix" | ||
| 119 | regexp = "regexp" | ||
| 120 | type = "type" | ||
| 121 | repo = "repo" | ||
| 122 | check = "check" | ||
| 123 | schemelessRepo = "schemelessRepo" | ||
| 124 | |||
| 125 | # GitHub | ||
| 126 | vcsGitHub = {} | ||
| 127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
| 128 | vcsGitHub[vcs.regexp] = re.compile( | ||
| 129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 130 | vcsGitHub[vcs.type] = "git" | ||
| 131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
| 132 | |||
| 133 | # Bitbucket | ||
| 134 | vcsBitbucket = {} | ||
| 135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
| 136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
| 137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 138 | vcsBitbucket[vcs.type] = "git" | ||
| 139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
| 140 | |||
| 141 | # IBM DevOps Services (JazzHub) | ||
| 142 | vcsIBMDevOps = {} | ||
| 143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
| 144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
| 145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 146 | vcsIBMDevOps[vcs.type] = "git" | ||
| 147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
| 148 | |||
| 149 | # Git at Apache | ||
| 150 | vcsApacheGit = {} | ||
| 151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
| 152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
| 153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 154 | vcsApacheGit[vcs.type] = "git" | ||
| 155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
| 156 | |||
| 157 | # Git at OpenStack | ||
| 158 | vcsOpenStackGit = {} | ||
| 159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
| 160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
| 161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 162 | vcsOpenStackGit[vcs.type] = "git" | ||
| 163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
| 164 | |||
| 165 | # chiselapp.com for fossil | ||
| 166 | vcsChiselapp = {} | ||
| 167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
| 168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
| 169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
| 170 | vcsChiselapp[vcs.type] = "fossil" | ||
| 171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
| 172 | |||
| 173 | # General syntax for any server. | ||
| 174 | # Must be last. | ||
| 175 | vcsGeneralServer = {} | ||
| 176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
| 177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
| 178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
| 179 | |||
| 180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
| 181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
| 182 | vcsGeneralServer] | ||
| 183 | |||
| 184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
| 185 | logger.warning("Suspicious module path %s" % modulepath) | ||
| 186 | return None | ||
| 187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
| 188 | logger.warning("Import path should not start with %s %s" % | ||
| 189 | ("http", "https")) | ||
| 190 | return None | ||
| 191 | |||
| 192 | rootpath = None | ||
| 193 | vcstype = None | ||
| 194 | repourl = None | ||
| 195 | suffix = None | ||
| 196 | |||
| 197 | for srv in vcsPaths: | ||
| 198 | m = srv[vcs.regexp].match(modulepath) | ||
| 199 | if vcs.pathprefix in srv: | ||
| 200 | if host == srv[vcs.pathprefix]: | ||
| 201 | rootpath = m.group('root') | ||
| 202 | vcstype = srv[vcs.type] | ||
| 203 | repourl = m.expand(srv[vcs.repo]) | ||
| 204 | suffix = m.group('suffix') | ||
| 205 | break | ||
| 206 | elif m and srv[vcs.schemelessRepo]: | ||
| 207 | rootpath = m.group('root') | ||
| 208 | vcstype = m[vcs.type] | ||
| 209 | repourl = m[vcs.repo] | ||
| 210 | suffix = m.group('suffix') | ||
| 211 | break | ||
| 212 | |||
| 213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
| 214 | |||
| 215 | def __resolve_repository_dynamic(self, modulepath): | ||
| 216 | """Resolve the repository root in a dynamic manner. | ||
| 217 | |||
| 218 | The method is based on the go implementation of | ||
| 219 | `repoRootForImportDynamic` in | ||
| 220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
| 221 | """ | ||
| 222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
| 223 | |||
| 224 | class GoImportHTMLParser(HTMLParser): | ||
| 225 | |||
| 226 | def __init__(self): | ||
| 227 | super().__init__() | ||
| 228 | self.__srv = {} | ||
| 229 | |||
| 230 | def handle_starttag(self, tag, attrs): | ||
| 231 | if tag == 'meta' and list( | ||
| 232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
| 233 | content = list( | ||
| 234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
| 235 | if content: | ||
| 236 | srv = content[0][1].split() | ||
| 237 | self.__srv[srv[0]] = srv | ||
| 238 | |||
| 239 | def go_import(self, modulepath): | ||
| 240 | if modulepath in self.__srv: | ||
| 241 | srv = self.__srv[modulepath] | ||
| 242 | return GoImport(srv[0], srv[1], srv[2], None) | ||
| 243 | return None | ||
| 244 | |||
| 245 | url = url.geturl() + "?go-get=1" | ||
| 246 | req = urllib.request.Request(url) | ||
| 247 | |||
| 248 | try: | ||
| 249 | body = urllib.request.urlopen(req).read() | ||
| 250 | except HTTPError as http_err: | ||
| 251 | logger.warning( | ||
| 252 | "Unclean status when fetching page from [%s]: %s", url, str(http_err)) | ||
| 253 | body = http_err.fp.read() | ||
| 254 | except URLError as url_err: | ||
| 255 | logger.warning( | ||
| 256 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
| 257 | return None | ||
| 258 | |||
| 259 | parser = GoImportHTMLParser() | ||
| 260 | parser.feed(body.decode('utf-8')) | ||
| 261 | parser.close() | ||
| 262 | |||
| 263 | return parser.go_import(modulepath) | ||
| 264 | |||
| 265 | def __resolve_from_golang_proxy(self, modulepath, version): | ||
| 266 | """ | ||
| 267 | Resolves repository data from golang proxy | ||
| 268 | """ | ||
| 269 | url = urllib.parse.urlparse("https://proxy.golang.org/" | ||
| 270 | + modulepath | ||
| 271 | + "/@v/" | ||
| 272 | + version | ||
| 273 | + ".info") | ||
| 274 | |||
| 275 | # Transform url to lower case, golang proxy doesn't like mixed case | ||
| 276 | req = urllib.request.Request(url.geturl().lower()) | ||
| 277 | |||
| 278 | try: | ||
| 279 | resp = urllib.request.urlopen(req) | ||
| 280 | except URLError as url_err: | ||
| 281 | logger.warning( | ||
| 282 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
| 283 | return None | ||
| 284 | |||
| 285 | golang_proxy_res = resp.read().decode('utf-8') | ||
| 286 | modinfo = json.loads(golang_proxy_res) | ||
| 287 | |||
| 288 | if modinfo and 'Origin' in modinfo: | ||
| 289 | origin = modinfo['Origin'] | ||
| 290 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
| 291 | |||
| 292 | # We normalize the repo URL since we don't want the scheme in it | ||
| 293 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
| 294 | _root, _, _ = self.__split_path_version(modulepath) | ||
| 295 | if _subdir: | ||
| 296 | _root = _root[:-len(_subdir)].strip('/') | ||
| 297 | |||
| 298 | _commit = origin['Hash'] | ||
| 299 | _vcs = origin['VCS'] | ||
| 300 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
| 301 | |||
| 302 | return None | ||
| 303 | |||
| 304 | def __resolve_repository(self, modulepath): | ||
| 305 | """ | ||
| 306 | Resolves src uri from go module-path | ||
| 307 | """ | ||
| 308 | repodata = self.__resolve_repository_static(modulepath) | ||
| 309 | if not repodata or not repodata.url: | ||
| 310 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
| 311 | if not repodata or not repodata.url: | ||
| 312 | logger.error( | ||
| 313 | "Could not resolve repository for module path '%s'" % modulepath) | ||
| 314 | # There is no way to recover from this | ||
| 315 | sys.exit(14) | ||
| 316 | if repodata: | ||
| 317 | logger.debug(1, "Resolved download path for import '%s' => %s" % ( | ||
| 318 | modulepath, repodata.url)) | ||
| 319 | return repodata | ||
| 320 | |||
| 321 | def __split_path_version(self, path): | ||
| 322 | i = len(path) | ||
| 323 | dot = False | ||
| 324 | for j in range(i, 0, -1): | ||
| 325 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
| 326 | break | ||
| 327 | if path[j - 1] == '.': | ||
| 328 | dot = True | ||
| 329 | break | ||
| 330 | i = j - 1 | ||
| 331 | |||
| 332 | if i <= 1 or i == len( | ||
| 333 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
| 334 | return path, "", True | ||
| 335 | |||
| 336 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
| 337 | if dot or len( | ||
| 338 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
| 339 | return path, "", False | ||
| 340 | |||
| 341 | return prefix, pathMajor, True | ||
| 342 | |||
| 343 | def __get_path_major(self, pathMajor): | ||
| 344 | if not pathMajor: | ||
| 345 | return "" | ||
| 346 | |||
| 347 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
| 348 | logger.error( | ||
| 349 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
| 350 | |||
| 351 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
| 352 | pathMajor = pathMajor[:len("-unstable") - 2] | ||
| 353 | |||
| 354 | return pathMajor[1:] | ||
| 355 | |||
| 356 | def __build_coderepo(self, repo, path): | ||
| 357 | codedir = "" | ||
| 358 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
| 359 | if repo.root == path: | ||
| 360 | pathprefix = path | ||
| 361 | elif path.startswith(repo.root): | ||
| 362 | codedir = pathprefix[len(repo.root):].strip('/') | ||
| 363 | |||
| 364 | pseudoMajor = self.__get_path_major(pathMajor) | ||
| 365 | |||
| 366 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
| 367 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
| 368 | |||
| 369 | return CodeRepo(path, repo.root, codedir, | ||
| 370 | pathMajor, pathprefix, pseudoMajor) | ||
| 371 | |||
| 372 | def __resolve_version(self, repo, path, version): | ||
| 373 | hash = None | ||
| 374 | coderoot = self.__build_coderepo(repo, path) | ||
| 375 | |||
| 376 | def vcs_fetch_all(): | ||
| 377 | tmpdir = tempfile.mkdtemp() | ||
| 378 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
| 379 | bb.process.run(clone_cmd) | ||
| 380 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
| 381 | output, _ = bb.process.run( | ||
| 382 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
| 383 | bb.utils.prunedir(tmpdir) | ||
| 384 | return output.strip().split('\n') | ||
| 385 | |||
| 386 | def vcs_fetch_remote(tag): | ||
| 387 | # add * to grab ^{} | ||
| 388 | refs = {} | ||
| 389 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
| 390 | repo.url, tag) | ||
| 391 | output, _ = bb.process.run(ls_remote_cmd) | ||
| 392 | output = output.strip().split('\n') | ||
| 393 | for line in output: | ||
| 394 | f = line.split(maxsplit=1) | ||
| 395 | if len(f) != 2: | ||
| 396 | continue | ||
| 397 | |||
| 398 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
| 399 | if f[1].startswith(prefix): | ||
| 400 | refs[f[1][len(prefix):]] = f[0] | ||
| 401 | |||
| 402 | for key, hash in refs.items(): | ||
| 403 | if key.endswith(r"^{}"): | ||
| 404 | refs[key.strip(r"^{}")] = hash | ||
| 405 | |||
| 406 | return refs[tag] | ||
| 407 | |||
| 408 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
| 409 | |||
| 410 | if m_pseudo_semver: | ||
| 411 | remote_refs = vcs_fetch_all() | ||
| 412 | short_commit = m_pseudo_semver.group('commithash') | ||
| 413 | for l in remote_refs: | ||
| 414 | r = l.split(maxsplit=1) | ||
| 415 | sha1 = r[0] if len(r) else None | ||
| 416 | if not sha1: | ||
| 417 | logger.error( | ||
| 418 | "Ups: could not resolve abbref commit for %s" % short_commit) | ||
| 419 | |||
| 420 | elif sha1.startswith(short_commit): | ||
| 421 | hash = sha1 | ||
| 422 | break | ||
| 423 | else: | ||
| 424 | m_semver = re_semver.match(version) | ||
| 425 | if m_semver: | ||
| 426 | |||
| 427 | def get_sha1_remote(re): | ||
| 428 | rsha1 = None | ||
| 429 | for line in remote_refs: | ||
| 430 | # Split lines of the following format: | ||
| 431 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
| 432 | lineparts = line.split(maxsplit=1) | ||
| 433 | sha1 = lineparts[0] if len(lineparts) else None | ||
| 434 | refstring = lineparts[1] if len( | ||
| 435 | lineparts) == 2 else None | ||
| 436 | if refstring: | ||
| 437 | # Normalize tag string and split in case of multiple | ||
| 438 | # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) | ||
| 439 | refs = refstring.strip('(), ').split(',') | ||
| 440 | for ref in refs: | ||
| 441 | if re.match(ref.strip()): | ||
| 442 | rsha1 = sha1 | ||
| 443 | return rsha1 | ||
| 444 | |||
| 445 | semver = "v" + m_semver.group('major') + "."\ | ||
| 446 | + m_semver.group('minor') + "."\ | ||
| 447 | + m_semver.group('patch') \ | ||
| 448 | + (("-" + m_semver.group('prerelease')) | ||
| 449 | if m_semver.group('prerelease') else "") | ||
| 450 | |||
| 451 | tag = os.path.join( | ||
| 452 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
| 453 | |||
| 454 | # probe tag using 'ls-remote', which is faster than fetching | ||
| 455 | # complete history | ||
| 456 | hash = vcs_fetch_remote(tag) | ||
| 457 | if not hash: | ||
| 458 | # backup: fetch complete history | ||
| 459 | remote_refs = vcs_fetch_all() | ||
| 460 | hash = get_sha1_remote( | ||
| 461 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
| 462 | |||
| 463 | logger.debug( | ||
| 464 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
| 465 | return hash | ||
| 466 | |||
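__resolve_version thus handles two shapes of Go version string: pseudo-versions, whose trailing field is an abbreviated commit hash expanded against git log --all, and plain semver tags resolved via ls-remote. A rough sketch of the pseudo-version case; re_pseudo_semver is defined earlier in this file, outside this hunk, so the pattern below is only an assumed stand-in:

    import re

    # Assumed approximation of the re_pseudo_semver defined earlier
    re_pseudo_semver = re.compile(
        r"^v\d+\.\d+\.\d+-(?:\w+\.)?(?:0\.)?\d{14}"
        r"-(?P<commithash>[A-Fa-f0-9]+)$")

    m = re_pseudo_semver.match("v0.0.0-20240101120000-abcdef012345")
    assert m and m.group("commithash") == "abcdef012345"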
| 467 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
| 468 | """Generate SRC_URI functions for go imports""" | ||
| 469 | |||
| 470 | logger.info("Resolving repository for module %s", path) | ||
| 471 | # First try to resolve the repo and commit from the golang proxy. | ||
| 472 | # Most info is already there and we don't have to go through the | ||
| 473 | # repository or even perform the version-resolving magic | ||
| 474 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
| 475 | if golang_proxy_info: | ||
| 476 | repo = golang_proxy_info[0] | ||
| 477 | commit = golang_proxy_info[1] | ||
| 478 | else: | ||
| 479 | # Fallback | ||
| 480 | # Resolve repository by 'hand' | ||
| 481 | repo = self.__resolve_repository(path) | ||
| 482 | commit = self.__resolve_version(repo, path, version) | ||
| 483 | |||
| 484 | url = urllib.parse.urlparse(repo.url) | ||
| 485 | repo_url = url.netloc + url.path | ||
| 486 | |||
| 487 | coderoot = self.__build_coderepo(repo, path) | ||
| 488 | |||
| 489 | inline_fcn = "${@go_src_uri(" | ||
| 490 | inline_fcn += f"'{repo_url}','{version}'" | ||
| 491 | if repo_url != path: | ||
| 492 | inline_fcn += f",path='{path}'" | ||
| 493 | if coderoot.codeDir: | ||
| 494 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
| 495 | if repo.vcs != 'git': | ||
| 496 | inline_fcn += f",vcs='{repo.vcs}'" | ||
| 497 | if replaces: | ||
| 498 | inline_fcn += f",replaces='{replaces}'" | ||
| 499 | if coderoot.pathMajor: | ||
| 500 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
| 501 | inline_fcn += ")}" | ||
| 502 | |||
| 503 | return inline_fcn, commit | ||
| 504 | |||
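The returned string is a BitBake inline Python expression expanded at recipe parse time; the optional arguments are only emitted when they differ from the defaults. For a hypothetical module github.com/example/mod at v1.2.3, hosted in a git repo whose URL equals the import path, the entry would look roughly like:

    ${@go_src_uri('github.com/example/mod','v1.2.3')}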
| 505 | def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
| 506 | |||
| 507 | import re | ||
| 508 | src_uris = [] | ||
| 509 | src_revs = [] | ||
| 510 | |||
| 511 | def generate_src_rev(path, version, commithash): | ||
| 512 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
| 513 | # Oops... maybe someone manipulated the source repository and the | ||
| 514 | # version or commit could not be resolved. This is a sign that | ||
| 515 | # a) the supply chain was manipulated (bad) | ||
| 516 | # b) the version-resolving implementation no longer works | ||
| 517 | # (less bad) | ||
| 518 | if not commithash: | ||
| 519 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
| 520 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
| 521 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
| 522 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
| 523 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
| 524 | |||
| 525 | return src_rev | ||
| 526 | |||
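Each dependency thus contributes a comment plus a name-suffixed SRCREV, and the warning banner only appears when resolution failed. Illustrative output for a resolved module (hash invented):

    # github.com/example/mod@v1.2.3 => 2222222222222222222222222222222222222222
    SRCREV_github.com.example.mod = "2222222222222222222222222222222222222222"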
| 527 | # We first go over the replacement list because we are essentially | ||
| 528 | # interested only in the replaced path | ||
| 529 | if go_mod['Replace']: | ||
| 530 | for replacement in go_mod['Replace']: | ||
| 531 | oldpath = replacement['Old']['Path'] | ||
| 532 | path = replacement['New']['Path'] | ||
| 533 | version = '' | ||
| 534 | if 'Version' in replacement['New']: | ||
| 535 | version = replacement['New']['Version'] | ||
| 536 | |||
| 537 | if os.path.exists(os.path.join(srctree, path)): | ||
| 538 | # the module refers to the local path, remove it from requirement list | ||
| 539 | # because it's a local module | ||
| 540 | go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath] | ||
| 541 | else: | ||
| 542 | # Replace the path and the version, so we don't iterate replacement list anymore | ||
| 543 | for require in go_mod['Require']: | ||
| 544 | if require['Path'] == oldpath: | ||
| 545 | require.update({'Path': path, 'Version': version}) | ||
| 546 | break | ||
| 547 | |||
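go mod edit -json models each replace directive as an Old/New pair; a New Path that exists inside the source tree marks a local replacement, which the loop above drops from the requirement list, while anything else rewrites the requirement's Path and Version in place. A trimmed, illustrative excerpt of the JSON this loop consumes:

    "Replace": [
        {
            "Old": {"Path": "example.com/orig"},
            "New": {"Path": "github.com/fork/orig", "Version": "v1.2.3"}
        }
    ]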
| 548 | for require in go_mod['Require']: | ||
| 549 | path = require['Path'] | ||
| 550 | version = require['Version'] | ||
| 551 | |||
| 552 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
| 553 | path, version) | ||
| 554 | src_uris.append(inline_fcn) | ||
| 555 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
| 556 | |||
| 557 | # strip version part from module URL /vXX | ||
| 558 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 559 | pn, _ = determine_from_url(baseurl) | ||
| 560 | go_mods_basename = "%s-modules.inc" % pn | ||
| 561 | |||
| 562 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | ||
| 563 | with open(go_mods_filename, "w") as f: | ||
| 564 | # We introduce this indirection to make the tests a little easier | ||
| 565 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | ||
| 566 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | ||
| 567 | for uri in src_uris: | ||
| 568 | f.write(" " + uri + " \\\n") | ||
| 569 | f.write("\"\n\n") | ||
| 570 | for rev in src_revs: | ||
| 571 | f.write(rev + "\n") | ||
| 572 | |||
| 573 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
| 574 | |||
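The generated ${PN}-modules.inc therefore carries both halves of the dependency data: the SRC_URI additions and the per-module SRCREVs. Sketch of the layout for a single hypothetical dependency:

    SRC_URI += "${GO_DEPENDENCIES_SRC_URI}"
    GO_DEPENDENCIES_SRC_URI = "\
        ${@go_src_uri('github.com/example/mod','v1.2.3')} \
    "

    # github.com/example/mod@v1.2.3 => 2222222222222222222222222222222222222222
    SRCREV_github.com.example.mod = "2222222222222222222222222222222222222222"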
| 575 | def __go_run_cmd(self, cmd, cwd, d): | ||
| 576 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
| 577 | shell=True, cwd=cwd) | ||
| 578 | |||
| 579 | def __go_native_version(self, d): | ||
| 580 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
| 581 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
| 582 | major = int(m.group(2)) | ||
| 583 | minor = int(m.group(3)) | ||
| 584 | patch = int(m.group(4)) | ||
| 585 | |||
| 586 | return major, minor, patch | ||
| 587 | |||
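The regex pulls the version triple straight out of the go version banner; a quick check against a typical output line:

    import re

    m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)",
                 "go version go1.21.5 linux/amd64")
    print(int(m.group(2)), int(m.group(3)), int(m.group(4)))  # -> 1 21 5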
| 588 | def __go_mod_patch(self, srctree, localfilesdir, extravalues, d): | ||
| 589 | |||
| 590 | patchfilename = "go.mod.patch" | ||
| 591 | go_native_version_major, go_native_version_minor, _ = self.__go_native_version( | ||
| 592 | d) | ||
| 593 | self.__go_run_cmd("go mod tidy -go=%d.%d" % | ||
| 594 | (go_native_version_major, go_native_version_minor), srctree, d) | ||
| 595 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
| 596 | |||
| 597 | # Create patch in order to upgrade go version | ||
| 598 | self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d) | ||
| 599 | # Restore original state | ||
| 600 | self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d) | ||
| 601 | |||
| 602 | go_mod = json.loads(stdout) | ||
| 603 | tmpfile = os.path.join(localfilesdir, patchfilename) | ||
| 604 | shutil.move(os.path.join(srctree, patchfilename), tmpfile) | ||
| 605 | |||
| 606 | extravalues['extrafiles'][patchfilename] = tmpfile | ||
| 607 | |||
| 608 | return go_mod, patchfilename | ||
| 609 | |||
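The method boils down to this command sequence, run in the source tree, with the resulting patch then moved into the recipe's files directory:

    go mod tidy -go=<native major>.<native minor>  # upgrade the go directive
    go mod edit -json                              # capture the upgraded graph
    git diff go.mod > go.mod.patch                 # record the upgrade as a patch
    git checkout HEAD go.mod go.sum                # restore the original tree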
| 610 | def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
| 611 | # Perform vendoring to retrieve the correct modules.txt | ||
| 612 | tmp_vendor_dir = tempfile.mkdtemp() | ||
| 613 | |||
| 614 | # -v causes go to print modules.txt to stderr | ||
| 615 | _, stderr = self.__go_run_cmd( | ||
| 616 | "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d) | ||
| 617 | |||
| 618 | modules_txt_basename = "modules.txt" | ||
| 619 | modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename) | ||
| 620 | with open(modules_txt_filename, "w") as f: | ||
| 621 | f.write(stderr) | ||
| 622 | |||
| 623 | extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename | ||
| 624 | |||
| 625 | licenses = [] | ||
| 626 | lic_files_chksum = [] | ||
| 627 | licvalues = guess_license(tmp_vendor_dir, d) | ||
| 628 | shutil.rmtree(tmp_vendor_dir) | ||
| 629 | |||
| 630 | if licvalues: | ||
| 631 | for licvalue in licvalues: | ||
| 632 | license = licvalue[0] | ||
| 633 | lics = tidy_licenses(fixup_license(license)) | ||
| 634 | lics = [lic for lic in lics if lic not in licenses] | ||
| 635 | if len(lics): | ||
| 636 | licenses.extend(lics) | ||
| 637 | lic_files_chksum.append( | ||
| 638 | 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
| 639 | |||
| 640 | # strip version part from module URL /vXX | ||
| 641 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 642 | pn, _ = determine_from_url(baseurl) | ||
| 643 | licenses_basename = "%s-licenses.inc" % pn | ||
| 644 | |||
| 645 | licenses_filename = os.path.join(localfilesdir, licenses_basename) | ||
| 646 | with open(licenses_filename, "w") as f: | ||
| 647 | f.write("GO_MOD_LICENSES = \"%s\"\n\n" % | ||
| 648 | ' & '.join(sorted(licenses, key=str.casefold))) | ||
| 649 | # We introduce this indirection to make the tests a little easier | ||
| 650 | f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n") | ||
| 651 | f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n") | ||
| 652 | for lic in lic_files_chksum: | ||
| 653 | f.write(" " + lic + " \\\n") | ||
| 654 | f.write("\"\n") | ||
| 655 | |||
| 656 | extravalues['extrafiles'][licenses_basename] = licenses_filename | ||
| 657 | |||
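The resulting ${PN}-licenses.inc folds the licenses detected in the vendor tree into GO_MOD_LICENSES and records a checksum per license file. Illustrative content (license names and md5 invented):

    GO_MOD_LICENSES = "Apache-2.0 & MIT"

    LIC_FILES_CHKSUM += "${VENDORED_LIC_FILES_CHKSUM}"
    VENDORED_LIC_FILES_CHKSUM = "\
        file://src/${GO_IMPORT}/vendor/github.com/example/mod/LICENSE;md5=0123456789abcdef0123456789abcdef \
    "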
| 658 | def process(self, srctree, classes, lines_before, | 66 | def process(self, srctree, classes, lines_before, |
| 659 | lines_after, handled, extravalues): | 67 | lines_after, handled, extravalues): |
| 660 | 68 | ||
| @@ -665,63 +73,51 @@ class GoRecipeHandler(RecipeHandler): | |||
| 665 | if not files: | 73 | if not files: |
| 666 | return False | 74 | return False |
| 667 | 75 | ||
| 668 | d = bb.data.createCopy(tinfoil.config_data) | ||
| 669 | go_bindir = self.__ensure_go() | 76 | go_bindir = self.__ensure_go() |
| 670 | if not go_bindir: | 77 | if not go_bindir: |
| 671 | sys.exit(14) | 78 | sys.exit(14) |
| 672 | 79 | ||
| 673 | d.prependVar('PATH', '%s:' % go_bindir) | ||
| 674 | handled.append('buildsystem') | 80 | handled.append('buildsystem') |
| 675 | classes.append("go-vendor") | 81 | classes.append("go-mod") |
| 676 | 82 | ||
| 677 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | 83 | # Use go-mod-update-modules to set the full SRC_URI and LICENSE |
| 84 | classes.append("go-mod-update-modules") | ||
| 85 | extravalues["run_tasks"] = "update_modules" | ||
| 678 | 86 | ||
| 87 | env = dict(os.environ) | ||
| 88 | env["PATH"] += f":{go_bindir}" | ||
| 89 | |||
| 90 | stdout = subprocess.check_output(("go", "mod", "edit", "-json"), | ||
| 91 | cwd=srctree, env=env, text=True) | ||
| 679 | go_mod = json.loads(stdout) | 92 | go_mod = json.loads(stdout) |
| 680 | go_import = go_mod['Module']['Path'] | 93 | go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path']) |
| 681 | go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) | ||
| 682 | go_version_major = int(go_version_match.group(1)) | ||
| 683 | go_version_minor = int(go_version_match.group(2)) | ||
| 684 | src_uris = [] | ||
| 685 | 94 | ||
| 686 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | 95 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') |
| 687 | extravalues.setdefault('extrafiles', {}) | 96 | extravalues.setdefault('extrafiles', {}) |
| 688 | 97 | ||
| 689 | # Use an explicit name determined from the module name because it | 98 | # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files |
| 690 | # might differ from the actual URL for replaced modules | 99 | basename = "{pn}-licenses.inc" |
| 691 | # strip version part from module URL /vXX | 100 | filename = os.path.join(localfilesdir, basename) |
| 692 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | 101 | with open(filename, "w") as f: |
| 693 | pn, _ = determine_from_url(baseurl) | 102 | f.write("# FROM RECIPETOOL\n") |
| 694 | 103 | extravalues['extrafiles'][f"../{basename}"] = filename | |
| 695 | # go.mod files with version < 1.17 may not include all indirect | ||
| 696 | # dependencies. Thus, we have to upgrade the go version. | ||
| 697 | if go_version_major == 1 and go_version_minor < 17: | ||
| 698 | logger.warning( | ||
| 699 | "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") | ||
| 700 | go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, | ||
| 701 | extravalues, d) | ||
| 702 | src_uris.append( | ||
| 703 | "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename)) | ||
| 704 | |||
| 705 | # Check whether the module is vendored. If so, we have nothing to do. | ||
| 706 | # Otherwise we gather all dependencies and add them to the recipe | ||
| 707 | if not os.path.exists(os.path.join(srctree, "vendor")): | ||
| 708 | |||
| 709 | # Write additional $BPN-modules.inc file | ||
| 710 | self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) | ||
| 711 | lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") | ||
| 712 | lines_before.append("require %s-licenses.inc" % (pn)) | ||
| 713 | 104 | ||
| 714 | self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) | 105 | basename = "{pn}-go-mods.inc" |
| 715 | 106 | filename = os.path.join(localfilesdir, basename) | |
| 716 | self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) | 107 | with open(filename, "w") as f: |
| 717 | lines_before.append("require %s-modules.inc" % (pn)) | 108 | f.write("# FROM RECIPETOOL\n") |
| 109 | extravalues['extrafiles'][f"../{basename}"] = filename | ||
| 718 | 110 | ||
| 719 | # Do generic license handling | 111 | # Do generic license handling |
| 112 | d = bb.data.createCopy(tinfoil.config_data) | ||
| 720 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | 113 | handle_license_vars(srctree, lines_before, handled, extravalues, d) |
| 721 | self.__rewrite_lic_uri(lines_before) | 114 | self.__rewrite_lic_vars(lines_before) |
| 722 | 115 | ||
| 723 | lines_before.append("GO_IMPORT = \"{}\"".format(baseurl)) | 116 | self.__rewrite_src_uri(lines_before) |
| 724 | lines_before.append("SRCREV_FORMAT = \"${BPN}\"") | 117 | |
| 118 | lines_before.append('require ${BPN}-licenses.inc') | ||
| 119 | lines_before.append('require ${BPN}-go-mods.inc') | ||
| 120 | lines_before.append(f'GO_IMPORT = "{go_import}"') | ||
| 725 | 121 | ||
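After this rewrite the handler only emits a skeleton: both stub include files contain nothing but the # FROM RECIPETOOL marker, and the update_modules task of go-mod-update-modules, requested through extravalues["run_tasks"], later regenerates them with the real dependency and license data. The recipe body produced here reduces to roughly (import path invented):

    SRC_URI = "git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}"
    require ${BPN}-licenses.inc
    require ${BPN}-go-mods.inc
    GO_IMPORT = "github.com/example/mod"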
| 726 | def __update_lines_before(self, updated, newlines, lines_before): | 122 | def __update_lines_before(self, updated, newlines, lines_before): |
| 727 | if updated: | 123 | if updated: |
| @@ -733,9 +129,9 @@ class GoRecipeHandler(RecipeHandler): | |||
| 733 | lines_before.append(line) | 129 | lines_before.append(line) |
| 734 | return updated | 130 | return updated |
| 735 | 131 | ||
| 736 | def __rewrite_lic_uri(self, lines_before): | 132 | def __rewrite_lic_vars(self, lines_before): |
| 737 | |||
| 738 | def varfunc(varname, origvalue, op, newlines): | 133 | def varfunc(varname, origvalue, op, newlines): |
| 134 | import urllib.parse | ||
| 739 | if varname == 'LIC_FILES_CHKSUM': | 135 | if varname == 'LIC_FILES_CHKSUM': |
| 740 | new_licenses = [] | 136 | new_licenses = [] |
| 741 | licenses = origvalue.split('\\') | 137 | licenses = origvalue.split('\\') |
| @@ -760,12 +156,11 @@ class GoRecipeHandler(RecipeHandler): | |||
| 760 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | 156 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) |
| 761 | return self.__update_lines_before(updated, newlines, lines_before) | 157 | return self.__update_lines_before(updated, newlines, lines_before) |
| 762 | 158 | ||
| 763 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | 159 | def __rewrite_src_uri(self, lines_before): |
| 764 | 160 | ||
| 765 | def varfunc(varname, origvalue, op, newlines): | 161 | def varfunc(varname, origvalue, op, newlines): |
| 766 | if varname == 'SRC_URI': | 162 | if varname == 'SRC_URI': |
| 767 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | 163 | src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'] |
| 768 | src_uri.extend(additional_uris) | ||
| 769 | return src_uri, None, -1, True | 164 | return src_uri, None, -1, True |
| 770 | return origvalue, None, 0, True | 165 | return origvalue, None, 0, True |
| 771 | 166 | ||
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py index 113a89f6a6..8c4cdd5234 100644 --- a/scripts/lib/recipetool/create_npm.py +++ b/scripts/lib/recipetool/create_npm.py | |||
| @@ -15,9 +15,9 @@ import bb | |||
| 15 | from bb.fetch2.npm import NpmEnvironment | 15 | from bb.fetch2.npm import NpmEnvironment |
| 16 | from bb.fetch2.npm import npm_package | 16 | from bb.fetch2.npm import npm_package |
| 17 | from bb.fetch2.npmsw import foreach_dependencies | 17 | from bb.fetch2.npmsw import foreach_dependencies |
| 18 | from oe.license_finder import match_licenses, find_license_files | ||
| 18 | from recipetool.create import RecipeHandler | 19 | from recipetool.create import RecipeHandler |
| 19 | from recipetool.create import get_license_md5sums | 20 | from recipetool.create import generate_common_licenses_chksums |
| 20 | from recipetool.create import guess_license | ||
| 21 | from recipetool.create import split_pkg_licenses | 21 | from recipetool.create import split_pkg_licenses |
| 22 | logger = logging.getLogger('recipetool') | 22 | logger = logging.getLogger('recipetool') |
| 23 | 23 | ||
| @@ -112,40 +112,54 @@ class NpmRecipeHandler(RecipeHandler): | |||
| 112 | """Return the extra license files and the list of packages""" | 112 | """Return the extra license files and the list of packages""" |
| 113 | licfiles = [] | 113 | licfiles = [] |
| 114 | packages = {} | 114 | packages = {} |
| 115 | # Licenses from package.json will point to COMMON_LICENSE_DIR so we need | ||
| 116 | # to associate them explicitly to packages for split_pkg_licenses() | ||
| 117 | fallback_licenses = dict() | ||
| 118 | |||
| 119 | def _find_package_licenses(destdir): | ||
| 120 | """Either find license files, or use package.json metadata""" | ||
| 121 | def _get_licenses_from_package_json(package_json): | ||
| 122 | with open(os.path.join(srctree, package_json), "r") as f: | ||
| 123 | data = json.load(f) | ||
| 124 | if "license" in data: | ||
| 125 | licenses = data["license"].split(" ") | ||
| 126 | licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"] | ||
| 127 | return [], licenses | ||
| 128 | else: | ||
| 129 | return [package_json], None | ||
| 115 | 130 | ||
| 116 | # Handle the parent package | ||
| 117 | packages["${PN}"] = "" | ||
| 118 | |||
| 119 | def _licfiles_append_fallback_readme_files(destdir): | ||
| 120 | """Append README files as fallback to license files if a license files is missing""" | ||
| 121 | |||
| 122 | fallback = True | ||
| 123 | readmes = [] | ||
| 124 | basedir = os.path.join(srctree, destdir) | 131 | basedir = os.path.join(srctree, destdir) |
| 125 | for fn in os.listdir(basedir): | 132 | licfiles = find_license_files(basedir) |
| 126 | upper = fn.upper() | 133 | if len(licfiles) > 0: |
| 127 | if upper.startswith("README"): | 134 | return licfiles, None |
| 128 | fullpath = os.path.join(basedir, fn) | 135 | else: |
| 129 | readmes.append(fullpath) | 136 | # A license wasn't found in the package directory, so we'll use the package.json metadata |
| 130 | if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: | 137 | pkg_json = os.path.join(basedir, "package.json") |
| 131 | fallback = False | 138 | return _get_licenses_from_package_json(pkg_json) |
| 132 | if fallback: | 139 | |
| 133 | for readme in readmes: | 140 | def _get_package_licenses(destdir, package): |
| 134 | licfiles.append(os.path.relpath(readme, srctree)) | 141 | (package_licfiles, package_licenses) = _find_package_licenses(destdir) |
| 142 | if package_licfiles: | ||
| 143 | licfiles.extend(package_licfiles) | ||
| 144 | else: | ||
| 145 | fallback_licenses[package] = package_licenses | ||
| 135 | 146 | ||
| 136 | # Handle the dependencies | 147 | # Handle the dependencies |
| 137 | def _handle_dependency(name, params, destdir): | 148 | def _handle_dependency(name, params, destdir): |
| 138 | deptree = destdir.split('node_modules/') | 149 | deptree = destdir.split('node_modules/') |
| 139 | suffix = "-".join([npm_package(dep) for dep in deptree]) | 150 | suffix = "-".join([npm_package(dep) for dep in deptree]) |
| 140 | packages["${PN}" + suffix] = destdir | 151 | packages["${PN}" + suffix] = destdir |
| 141 | _licfiles_append_fallback_readme_files(destdir) | 152 | _get_package_licenses(destdir, "${PN}" + suffix) |
| 142 | 153 | ||
| 143 | with open(shrinkwrap_file, "r") as f: | 154 | with open(shrinkwrap_file, "r") as f: |
| 144 | shrinkwrap = json.load(f) | 155 | shrinkwrap = json.load(f) |
| 145 | |||
| 146 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) | 156 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) |
| 147 | 157 | ||
| 148 | return licfiles, packages | 158 | # Handle the parent package |
| 159 | packages["${PN}"] = "" | ||
| 160 | _get_package_licenses(srctree, "${PN}") | ||
| 161 | |||
| 162 | return licfiles, packages, fallback_licenses | ||
| 149 | 163 | ||
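When no license file is found, the license field of package.json is treated as a flat SPDX expression: split on spaces, the OR/AND operators dropped, and parentheses stripped from the remaining identifiers. A standalone sketch of that parsing step:

    def licenses_from_expression(expression):
        # "(MIT OR Apache-2.0)" -> ["MIT", "Apache-2.0"]
        parts = expression.split(" ")
        return [p.strip("()") for p in parts if p not in ("OR", "AND")]

    assert licenses_from_expression("(MIT OR Apache-2.0)") == ["MIT", "Apache-2.0"]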
| 150 | # Handle the peer dependencies | 164 | # Handle the peer dependencies |
| 151 | def _handle_peer_dependency(self, shrinkwrap_file): | 165 | def _handle_peer_dependency(self, shrinkwrap_file): |
| @@ -266,36 +280,12 @@ class NpmRecipeHandler(RecipeHandler): | |||
| 266 | fetcher.unpack(srctree) | 280 | fetcher.unpack(srctree) |
| 267 | 281 | ||
| 268 | bb.note("Handling licences ...") | 282 | bb.note("Handling licences ...") |
| 269 | (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) | 283 | (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev) |
| 270 | 284 | licvalues = match_licenses(licfiles, srctree, d) | |
| 271 | def _guess_odd_license(licfiles): | 285 | split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses) |
| 272 | import bb | 286 | fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist] |
| 273 | 287 | extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d) | |
| 274 | md5sums = get_license_md5sums(d, linenumbers=True) | 288 | extravalues["LICENSE"] = fallback_licenses_flat |
| 275 | |||
| 276 | chksums = [] | ||
| 277 | licenses = [] | ||
| 278 | for licfile in licfiles: | ||
| 279 | f = os.path.join(srctree, licfile) | ||
| 280 | md5value = bb.utils.md5_file(f) | ||
| 281 | (license, beginline, endline, md5) = md5sums.get(md5value, | ||
| 282 | (None, "", "", "")) | ||
| 283 | if not license: | ||
| 284 | license = "Unknown" | ||
| 285 | logger.info("Please add the following line for '%s' to a " | ||
| 286 | "'lib/recipetool/licenses.csv' and replace `Unknown`, " | ||
| 287 | "`X`, `Y` and `MD5` with the license, begin line, " | ||
| 288 | "end line and partial MD5 checksum:\n" \ | ||
| 289 | "%s,Unknown,X,Y,MD5" % (licfile, md5value)) | ||
| 290 | chksums.append("file://%s%s%s;md5=%s" % (licfile, | ||
| 291 | ";beginline=%s" % (beginline) if beginline else "", | ||
| 292 | ";endline=%s" % (endline) if endline else "", | ||
| 293 | md5 if md5 else md5value)) | ||
| 294 | licenses.append((license, licfile, md5value)) | ||
| 295 | return (licenses, chksums) | ||
| 296 | |||
| 297 | (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles) | ||
| 298 | split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after) | ||
| 299 | 289 | ||
| 300 | classes.append("npm") | 290 | classes.append("npm") |
| 301 | handled.append("buildsystem") | 291 | handled.append("buildsystem") |
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv deleted file mode 100644 index 80851111b3..0000000000 --- a/scripts/lib/recipetool/licenses.csv +++ /dev/null | |||
| @@ -1,37 +0,0 @@ | |||
| 1 | 0636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only | ||
| 2 | 12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only | ||
| 3 | 18810669f13b87348459e611d31ab760,GPL-2.0-only | ||
| 4 | 252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only | ||
| 5 | 2d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only | ||
| 6 | 3214f080875748938ba060314b4f727d,LGPL-2.0-only | ||
| 7 | 385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only | ||
| 8 | 393a5ca445f6965873eca0259a17f833,GPL-2.0-only | ||
| 9 | 3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0 | ||
| 10 | 3bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only | ||
| 11 | 4325afd396febcb659c36b49533135d4,GPL-2.0-only | ||
| 12 | 4fbd65380cdd255951079008b364516c,LGPL-2.1-only | ||
| 13 | 54c7042be62e169199200bc6477f04d1,BSD-3-Clause | ||
| 14 | 55ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only | ||
| 15 | 59530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only | ||
| 16 | 5f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only | ||
| 17 | 6a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only | ||
| 18 | 751419260aa954499f7abaabaa882bbe,GPL-2.0-only | ||
| 19 | 7fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only | ||
| 20 | 8ca43cbc842c2336e835926c2166c28b,GPL-2.0-only | ||
| 21 | 94d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only | ||
| 22 | 9ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only | ||
| 23 | 9f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only | ||
| 24 | a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only | ||
| 25 | b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only | ||
| 26 | bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only | ||
| 27 | bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only | ||
| 28 | c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only | ||
| 29 | d32239bcb673463ab874e80d47fae504,GPL-3.0-only | ||
| 30 | d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only | ||
| 31 | d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only | ||
| 32 | db979804f025cf55aabec7129cb671ed,LGPL-2.0-only | ||
| 33 | eb723b61539feef013de476e68b5c50a,GPL-2.0-only | ||
| 34 | ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only | ||
| 35 | f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only | ||
| 36 | fad9b3332be894bab9bc501572864b29,LGPL-2.1-only | ||
| 37 | fbc093901857fcd118f065f900982c24,LGPL-2.1-only | ||
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py new file mode 100644 index 0000000000..c7a53dc550 --- /dev/null +++ b/scripts/lib/resulttool/junit.py | |||
| @@ -0,0 +1,77 @@ | |||
| 1 | # resulttool - report test results in JUnit XML format | ||
| 2 | # | ||
| 3 | # Copyright (c) 2024, Siemens AG. | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import os | ||
| 9 | import re | ||
| 10 | import xml.etree.ElementTree as ET | ||
| 11 | import resulttool.resultutils as resultutils | ||
| 12 | |||
| 13 | def junit(args, logger): | ||
| 14 | testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map) | ||
| 15 | |||
| 16 | total_time = 0 | ||
| 17 | skipped = 0 | ||
| 18 | failures = 0 | ||
| 19 | errors = 0 | ||
| 20 | |||
| 21 | for tests in testresults.values(): | ||
| 22 | results = tests[next(reversed(tests))].get("result", {}) | ||
| 23 | |||
| 24 | for result_id, result in results.items(): | ||
| 25 | # filter out ptestresult.rawlogs and ptestresult.sections | ||
| 26 | if re.search(r'\.test_', result_id): | ||
| 27 | total_time += result.get("duration", 0) | ||
| 28 | |||
| 29 | if result['status'] == "FAILED": | ||
| 30 | failures += 1 | ||
| 31 | elif result['status'] == "ERROR": | ||
| 32 | errors += 1 | ||
| 33 | elif result['status'] == "SKIPPED": | ||
| 34 | skipped += 1 | ||
| 35 | |||
| 36 | testsuites_node = ET.Element("testsuites") | ||
| 37 | testsuites_node.set("time", "%s" % total_time) | ||
| 38 | testsuite_node = ET.SubElement(testsuites_node, "testsuite") | ||
| 39 | testsuite_node.set("name", "Testimage") | ||
| 40 | testsuite_node.set("time", "%s" % total_time) | ||
| 41 | testsuite_node.set("tests", "%s" % len(results)) | ||
| 42 | testsuite_node.set("failures", "%s" % failures) | ||
| 43 | testsuite_node.set("errors", "%s" % errors) | ||
| 44 | testsuite_node.set("skipped", "%s" % skipped) | ||
| 45 | |||
| 46 | for result_id, result in results.items(): | ||
| 47 | if re.search(r'\.test_', result_id): | ||
| 48 | testcase_node = ET.SubElement(testsuite_node, "testcase", { | ||
| 49 | "name": result_id, | ||
| 50 | "classname": "Testimage", | ||
| 51 | "time": str(result['duration']) | ||
| 52 | }) | ||
| 53 | if result['status'] == "SKIPPED": | ||
| 54 | ET.SubElement(testcase_node, "skipped", message=result['log']) | ||
| 55 | elif result['status'] == "FAILED": | ||
| 56 | ET.SubElement(testcase_node, "failure", message=result['log']) | ||
| 57 | elif result['status'] == "ERROR": | ||
| 58 | ET.SubElement(testcase_node, "error", message=result['log']) | ||
| 59 | |||
| 60 | tree = ET.ElementTree(testsuites_node) | ||
| 61 | |||
| 62 | if args.junit_xml_path is None: | ||
| 63 | args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml' | ||
| 64 | tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True) | ||
| 65 | |||
| 66 | logger.info('Saved JUnit XML report as %s' % args.junit_xml_path) | ||
| 67 | |||
| 68 | def register_commands(subparsers): | ||
| 69 | """Register subcommands from this plugin""" | ||
| 70 | parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format', | ||
| 71 | description='generate a unit test report in JUnit XML format based on the latest test results in testresults.json.', | ||
| 72 | group='analysis') | ||
| 73 | parser_build.set_defaults(func=junit) | ||
| 74 | parser_build.add_argument('json_file', | ||
| 75 | help='json file should point to the testresults.json') | ||
| 76 | parser_build.add_argument('-j', '--junit_xml_path', | ||
| 77 | help='junit xml path allows setting the path of the generated test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml') | ||
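Given the arguments registered above, usage is a single positional testresults.json plus an optional output path, e.g. "resulttool junit testresults.json -j /tmp/junit.xml". The report nests one Testimage testsuite under testsuites, with a testcase per .test_ result; a trimmed, illustrative document (names and timings invented):

    <?xml version='1.0' encoding='UTF-8'?>
    <testsuites time="12.345">
      <testsuite name="Testimage" time="12.345" tests="42" failures="1" errors="0" skipped="2">
        <testcase name="ssh.SSHTest.test_ssh" classname="Testimage" time="1.2" />
        <testcase name="df.DfTest.test_df" classname="Testimage" time="0.5">
          <failure message="..." />
        </testcase>
      </testsuite>
    </testsuites>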
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py index ecb27c5933..ae0861ac6b 100755 --- a/scripts/lib/resulttool/manualexecution.py +++ b/scripts/lib/resulttool/manualexecution.py | |||
| @@ -22,7 +22,7 @@ def load_json_file(f): | |||
| 22 | def write_json_file(f, json_data): | 22 | def write_json_file(f, json_data): |
| 23 | os.makedirs(os.path.dirname(f), exist_ok=True) | 23 | os.makedirs(os.path.dirname(f), exist_ok=True) |
| 24 | with open(f, 'w') as filedata: | 24 | with open(f, 'w') as filedata: |
| 25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) | 25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=1)) |
| 26 | 26 | ||
| 27 | class ManualTestRunner(object): | 27 | class ManualTestRunner(object): |
| 28 | 28 | ||
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py index 10e7d13841..33b3119c54 100644 --- a/scripts/lib/resulttool/regression.py +++ b/scripts/lib/resulttool/regression.py | |||
| @@ -212,6 +212,8 @@ def compare_result(logger, base_name, target_name, base_result, target_result, d | |||
| 212 | 212 | ||
| 213 | if base_result and target_result: | 213 | if base_result and target_result: |
| 214 | for k in base_result: | 214 | for k in base_result: |
| 215 | if k in ['ptestresult.rawlogs', 'ptestresult.sections']: | ||
| 216 | continue | ||
| 215 | base_testcase = base_result[k] | 217 | base_testcase = base_result[k] |
| 216 | base_status = base_testcase.get('status') | 218 | base_status = base_testcase.get('status') |
| 217 | if base_status: | 219 | if base_status: |
| @@ -422,6 +424,7 @@ def register_commands(subparsers): | |||
| 422 | help='(optional) filter the base results to this result ID') | 424 | help='(optional) filter the base results to this result ID') |
| 423 | parser_build.add_argument('-t', '--target-result-id', default='', | 425 | parser_build.add_argument('-t', '--target-result-id', default='', |
| 424 | help='(optional) filter the target results to this result ID') | 426 | help='(optional) filter the target results to this result ID') |
| 427 | parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes") | ||
| 425 | 428 | ||
| 426 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', | 429 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', |
| 427 | description='regression analysis comparing base result set to target ' | 430 | description='regression analysis comparing base result set to target ' |
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py index a349510ab8..1c100b00ab 100644 --- a/scripts/lib/resulttool/report.py +++ b/scripts/lib/resulttool/report.py | |||
| @@ -256,7 +256,7 @@ class ResultsTextReport(object): | |||
| 256 | if selected_test_case_only: | 256 | if selected_test_case_only: |
| 257 | print_selected_testcase_result(raw_results, selected_test_case_only) | 257 | print_selected_testcase_result(raw_results, selected_test_case_only) |
| 258 | else: | 258 | else: |
| 259 | print(json.dumps(raw_results, sort_keys=True, indent=4)) | 259 | print(json.dumps(raw_results, sort_keys=True, indent=1)) |
| 260 | else: | 260 | else: |
| 261 | print('Could not find raw test result for %s' % raw_test) | 261 | print('Could not find raw test result for %s' % raw_test) |
| 262 | return 0 | 262 | return 0 |
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py index c5521d81bd..b8fc79a6ac 100644 --- a/scripts/lib/resulttool/resultutils.py +++ b/scripts/lib/resulttool/resultutils.py | |||
| @@ -14,8 +14,11 @@ import scriptpath | |||
| 14 | import copy | 14 | import copy |
| 15 | import urllib.request | 15 | import urllib.request |
| 16 | import posixpath | 16 | import posixpath |
| 17 | import logging | ||
| 17 | scriptpath.add_oe_lib_path() | 18 | scriptpath.add_oe_lib_path() |
| 18 | 19 | ||
| 20 | logger = logging.getLogger('resulttool') | ||
| 21 | |||
| 19 | flatten_map = { | 22 | flatten_map = { |
| 20 | "oeselftest": [], | 23 | "oeselftest": [], |
| 21 | "runtime": [], | 24 | "runtime": [], |
| @@ -31,13 +34,19 @@ regression_map = { | |||
| 31 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] | 34 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] |
| 32 | } | 35 | } |
| 33 | store_map = { | 36 | store_map = { |
| 34 | "oeselftest": ['TEST_TYPE'], | 37 | "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'], |
| 35 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], | 38 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], |
| 36 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 39 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
| 37 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 40 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
| 38 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] | 41 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] |
| 39 | } | 42 | } |
| 40 | 43 | ||
| 44 | rawlog_sections = { | ||
| 45 | "ptestresult.rawlogs": "ptest", | ||
| 46 | "ltpresult.rawlogs": "ltp", | ||
| 47 | "ltpposixresult.rawlogs": "ltpposix" | ||
| 48 | } | ||
| 49 | |||
| 41 | def is_url(p): | 50 | def is_url(p): |
| 42 | """ | 51 | """ |
| 43 | Helper for determining if the given path is a URL | 52 | Helper for determining if the given path is a URL |
| @@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid): | |||
| 108 | newresults[r][i] = results[r][i] | 117 | newresults[r][i] = results[r][i] |
| 109 | return newresults | 118 | return newresults |
| 110 | 119 | ||
| 111 | def strip_ptestresults(results): | 120 | def strip_logs(results): |
| 112 | newresults = copy.deepcopy(results) | 121 | newresults = copy.deepcopy(results) |
| 113 | #for a in newresults2: | ||
| 114 | # newresults = newresults2[a] | ||
| 115 | for res in newresults: | 122 | for res in newresults: |
| 116 | if 'result' not in newresults[res]: | 123 | if 'result' not in newresults[res]: |
| 117 | continue | 124 | continue |
| 118 | if 'ptestresult.rawlogs' in newresults[res]['result']: | 125 | for logtype in rawlog_sections: |
| 119 | del newresults[res]['result']['ptestresult.rawlogs'] | 126 | if logtype in newresults[res]['result']: |
| 127 | del newresults[res]['result'][logtype] | ||
| 120 | if 'ptestresult.sections' in newresults[res]['result']: | 128 | if 'ptestresult.sections' in newresults[res]['result']: |
| 121 | for i in newresults[res]['result']['ptestresult.sections']: | 129 | for i in newresults[res]['result']['ptestresult.sections']: |
| 122 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: | 130 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: |
| 123 | del newresults[res]['result']['ptestresult.sections'][i]['log'] | 131 | del newresults[res]['result']['ptestresult.sections'][i]['log'] |
| 124 | return newresults | 132 | return newresults |
| 125 | 133 | ||
| 134 | # For timing numbers, crazy amounts of precision don't make sense and just confuse | ||
| 135 | # the logs. For numbers over 1, trim to 3 decimal places, for numbers less than 1, | ||
| 136 | # trim to 4 significant digits | ||
| 137 | def trim_durations(results): | ||
| 138 | for res in results: | ||
| 139 | if 'result' not in results[res]: | ||
| 140 | continue | ||
| 141 | for entry in results[res]['result']: | ||
| 142 | if 'duration' in results[res]['result'][entry]: | ||
| 143 | duration = results[res]['result'][entry]['duration'] | ||
| 144 | if duration > 1: | ||
| 145 | results[res]['result'][entry]['duration'] = float("%.3f" % duration) | ||
| 146 | elif duration < 1: | ||
| 147 | results[res]['result'][entry]['duration'] = float("%.4g" % duration) | ||
| 148 | return results | ||
| 149 | |||
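Concretely, the two format strings behave like this:

    print(float("%.3f" % 12.3456789))   # 12.346    (3 decimal places)
    print(float("%.4g" % 0.000123456))  # 0.0001235 (4 significant digits)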
| 150 | def handle_cleanups(results): | ||
| 151 | # Remove pointless path duplication from old format reproducibility results | ||
| 152 | for res2 in results: | ||
| 153 | try: | ||
| 154 | section = results[res2]['result']['reproducible']['files'] | ||
| 155 | for pkgtype in section: | ||
| 156 | for filelist in section[pkgtype].copy(): | ||
| 157 | if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict: | ||
| 158 | newlist = [] | ||
| 159 | for entry in section[pkgtype][filelist]: | ||
| 160 | newlist.append(entry["reference"].split("/./")[1]) | ||
| 161 | section[pkgtype][filelist] = newlist | ||
| 162 | |||
| 163 | except KeyError: | ||
| 164 | pass | ||
| 165 | # Remove pointless duplicate rawlogs data | ||
| 166 | try: | ||
| 167 | del results[res2]['result']['reproducible.rawlogs'] | ||
| 168 | except KeyError: | ||
| 169 | pass | ||
| 170 | |||
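The old-format reproducibility entries stored dicts carrying full deploy paths; the "/./" separator marks where the interesting relative path begins, so the cleanup keeps only that tail. For instance (path invented):

    entry = {"reference": "/srv/deploy/./packages/core2-64/foo_1.0-r0.rpm"}
    print(entry["reference"].split("/./")[1])  # packages/core2-64/foo_1.0-r0.rpm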
| 126 | def decode_log(logdata): | 171 | def decode_log(logdata): |
| 127 | if isinstance(logdata, str): | 172 | if isinstance(logdata, str): |
| 128 | return logdata | 173 | return logdata |
| @@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results): | |||
| 155 | return None | 200 | return None |
| 156 | return decode_log(results[sectname]['log']) | 201 | return decode_log(results[sectname]['log']) |
| 157 | 202 | ||
| 158 | def ptestresult_get_rawlogs(results): | ||
| 159 | return generic_get_rawlogs('ptestresult.rawlogs', results) | ||
| 160 | |||
| 161 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): | 203 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): |
| 162 | for res in results: | 204 | for res in results: |
| 163 | if res: | 205 | if res: |
| @@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p | |||
| 167 | os.makedirs(os.path.dirname(dst), exist_ok=True) | 209 | os.makedirs(os.path.dirname(dst), exist_ok=True) |
| 168 | resultsout = results[res] | 210 | resultsout = results[res] |
| 169 | if not ptestjson: | 211 | if not ptestjson: |
| 170 | resultsout = strip_ptestresults(results[res]) | 212 | resultsout = strip_logs(results[res]) |
| 213 | trim_durations(resultsout) | ||
| 214 | handle_cleanups(resultsout) | ||
| 171 | with open(dst, 'w') as f: | 215 | with open(dst, 'w') as f: |
| 172 | f.write(json.dumps(resultsout, sort_keys=True, indent=4)) | 216 | f.write(json.dumps(resultsout, sort_keys=True, indent=1)) |
| 173 | for res2 in results[res]: | 217 | for res2 in results[res]: |
| 174 | if ptestlogs and 'result' in results[res][res2]: | 218 | if ptestlogs and 'result' in results[res][res2]: |
| 175 | seriesresults = results[res][res2]['result'] | 219 | seriesresults = results[res][res2]['result'] |
| 176 | rawlogs = ptestresult_get_rawlogs(seriesresults) | 220 | for logtype in rawlog_sections: |
| 177 | if rawlogs is not None: | 221 | logdata = generic_get_rawlogs(logtype, seriesresults) |
| 178 | with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: | 222 | if logdata is not None: |
| 179 | f.write(rawlogs) | 223 | logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log") |
| 224 | with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f: | ||
| 225 | f.write(logdata) | ||
| 180 | if 'ptestresult.sections' in seriesresults: | 226 | if 'ptestresult.sections' in seriesresults: |
| 181 | for i in seriesresults['ptestresult.sections']: | 227 | for i in seriesresults['ptestresult.sections']: |
| 182 | sectionlog = ptestresult_get_log(seriesresults, i) | 228 | sectionlog = ptestresult_get_log(seriesresults, i) |
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py index e0951f0a8f..b143334e69 100644 --- a/scripts/lib/resulttool/store.py +++ b/scripts/lib/resulttool/store.py | |||
| @@ -65,18 +65,35 @@ def store(args, logger): | |||
| 65 | 65 | ||
| 66 | for r in revisions: | 66 | for r in revisions: |
| 67 | results = revisions[r] | 67 | results = revisions[r] |
| 68 | if args.revision and r[0] != args.revision: | ||
| 69 | logger.info('skipping %s as non-matching' % r[0]) | ||
| 70 | continue | ||
| 68 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} | 71 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} |
| 69 | subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"]) | 72 | subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"]) |
| 70 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) | 73 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) |
| 71 | 74 | ||
| 72 | logger.info('Storing test result into git repository %s' % args.git_dir) | 75 | logger.info('Storing test result into git repository %s' % args.git_dir) |
| 73 | 76 | ||
| 74 | gitarchive.gitarchive(tempdir, args.git_dir, False, False, | 77 | excludes = [] |
| 78 | if args.logfile_archive: | ||
| 79 | excludes = ['*.log', "*.log.zst"] | ||
| 80 | |||
| 81 | tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False, | ||
| 75 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", | 82 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", |
| 76 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", | 83 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", |
| 77 | 'Test run #{tag_number} of {branch}:{commit}', '', | 84 | 'Test run #{tag_number} of {branch}:{commit}', '', |
| 78 | [], [], False, keywords, logger) | 85 | excludes, [], False, keywords, logger) |
| 79 | 86 | ||
| 87 | if args.logfile_archive: | ||
| 88 | logdir = args.logfile_archive + "/" + tagname | ||
| 89 | shutil.copytree(tempdir, logdir) | ||
| 90 | os.chmod(logdir, 0o755) | ||
| 91 | for root, dirs, files in os.walk(logdir): | ||
| 92 | for name in files: | ||
| 93 | if not name.endswith(".log"): | ||
| 94 | continue | ||
| 95 | f = os.path.join(root, name) | ||
| 96 | subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True) | ||
| 80 | finally: | 97 | finally: |
| 81 | subprocess.check_call(["rm", "-rf", tempdir]) | 98 | subprocess.check_call(["rm", "-rf", tempdir]) |
| 82 | 99 | ||
| @@ -102,3 +119,7 @@ def register_commands(subparsers): | |||
| 102 | help='add executed-by configuration to each result file') | 119 | help='add executed-by configuration to each result file') |
| 103 | parser_build.add_argument('-t', '--extra-test-env', default='', | 120 | parser_build.add_argument('-t', '--extra-test-env', default='', |
| 104 | help='add extra test environment data to each result file configuration') | 121 | help='add extra test environment data to each result file configuration') |
| 122 | parser_build.add_argument('-r', '--revision', default='', | ||
| 123 | help='only store data for the specified revision') | ||
| 124 | parser_build.add_argument('-l', '--logfile-archive', default='', | ||
| 125 | help='directory to separately archive log files along with a copy of the results') | ||
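Together the new options allow storing one tested revision at a time and archiving logs outside the git history; the archive copy keeps every file, with each *.log compressed in place by zstd --rm. Hypothetical invocation, with the existing positional arguments elided as "...":

    resulttool store ... -r <commit sha> -l /srv/testresult-logs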
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index f23e53cba9..32e749dbb1 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
| @@ -179,8 +179,13 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
| 179 | f.write('SRCREV = "%s"\n' % srcrev) | 179 | f.write('SRCREV = "%s"\n' % srcrev) |
| 180 | f.write('PV = "0.0+"\n') | 180 | f.write('PV = "0.0+"\n') |
| 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) | 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) |
| 182 | f.write('UNPACKDIR = "%s"\n' % destdir) | ||
| 183 | |||
| 182 | # Set S out of the way so it doesn't get created under the workdir | 184 | # Set S out of the way so it doesn't get created under the workdir |
| 183 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) | 185 | s_dir = os.path.join(tmpdir, 'emptysrc') |
| 186 | bb.utils.mkdirhier(s_dir) | ||
| 187 | f.write('S = "%s"\n' % s_dir) | ||
| 188 | |||
| 184 | if not mirrors: | 189 | if not mirrors: |
| 185 | # We do not need PREMIRRORS since we are almost certainly | 190 | # We do not need PREMIRRORS since we are almost certainly |
| 186 | # fetching new source rather than something that has already | 191 | # fetching new source rather than something that has already |
| @@ -232,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
| 232 | if e.errno != errno.ENOTEMPTY: | 237 | if e.errno != errno.ENOTEMPTY: |
| 233 | raise | 238 | raise |
| 234 | 239 | ||
| 235 | bb.utils.mkdirhier(destdir) | ||
| 236 | for fn in os.listdir(tmpworkdir): | ||
| 237 | shutil.move(os.path.join(tmpworkdir, fn), destdir) | ||
| 238 | |||
| 239 | finally: | 240 | finally: |
| 240 | if not preserve_tmp: | 241 | if not preserve_tmp: |
| 241 | shutil.rmtree(tmpdir) | 242 | shutil.rmtree(tmpdir) |
| @@ -271,12 +272,3 @@ def is_src_url(param): | |||
| 271 | return True | 272 | return True |
| 272 | return False | 273 | return False |
| 273 | 274 | ||
| 274 | def filter_src_subdirs(pth): | ||
| 275 | """ | ||
| 276 | Filter out subdirectories of initial unpacked source trees that we do not care about. | ||
| 277 | Used by devtool and recipetool. | ||
| 278 | """ | ||
| 279 | dirlist = os.listdir(pth) | ||
| 280 | filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa'] | ||
| 281 | dirlist = [x for x in dirlist if x not in filterout] | ||
| 282 | return dirlist | ||
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc index 89880b417b..4a440ddafe 100644 --- a/scripts/lib/wic/canned-wks/common.wks.inc +++ b/scripts/lib/wic/canned-wks/common.wks.inc | |||
| @@ -1,3 +1,3 @@ | |||
| 1 | # This file is included into 3 canned wks files from this directory | 1 | # This file is included into 3 canned wks files from this directory |
| 2 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 2 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
| 3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 | 3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 |
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks index 8d7d8de6ea..cb640056f1 100644 --- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks +++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks | |||
| @@ -3,7 +3,7 @@ | |||
| 3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
| 4 | 4 | ||
| 5 | 5 | ||
| 6 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 6 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
| 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
| 8 | 8 | ||
| 9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" | 9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" |
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks index f61d941d6d..4fd1999ffb 100644 --- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks +++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | |||
| @@ -15,7 +15,7 @@ | |||
| 15 | # | 15 | # |
| 16 | # - or any combinations of -r and --rootfs command line options | 16 | # - or any combinations of -r and --rootfs command line options |
| 17 | 17 | ||
| 18 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 18 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
| 19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 | 19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 |
| 20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 | 20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 |
| 21 | 21 | ||
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in index 2fd286ff98..5211972955 100644 --- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in +++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | |||
| @@ -1,3 +1,3 @@ | |||
| 1 | bootloader --ptable gpt | 1 | bootloader --ptable gpt |
| 2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1 | 2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2 |
| 3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | 3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ |
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in new file mode 100644 index 0000000000..cac0fa32cd --- /dev/null +++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | |||
| @@ -0,0 +1,3 @@ | |||
| 1 | bootloader --ptable gpt --timeout=5 | ||
| 2 | part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1 | ||
| 3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | ||
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks index 9f534fe184..16dfe76dfe 100644 --- a/scripts/lib/wic/canned-wks/mkefidisk.wks +++ b/scripts/lib/wic/canned-wks/mkefidisk.wks | |||
| @@ -2,10 +2,10 @@ | |||
| 2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
| 3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
| 4 | 4 | ||
| 5 | part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 | 5 | part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 |
| 6 | 6 | ||
| 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
| 8 | 8 | ||
| 9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap | 9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap |
| 10 | 10 | ||
| 11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" | 11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0" |
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks index 48c5ac4791..c3a030e5b4 100644 --- a/scripts/lib/wic/canned-wks/mkhybridiso.wks +++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks | |||
| @@ -2,6 +2,6 @@ | |||
| 2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image | 2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image |
| 3 | # which can be used on optical media as well as USB media. | 3 | # which can be used on optical media as well as USB media. |
| 4 | 4 | ||
| 5 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO | 5 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO |
| 6 | 6 | ||
| 7 | bootloader --timeout=15 --append="" | 7 | bootloader --timeout=15 --append="" |
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks index 63bc4dab6a..f9f8044f7d 100644 --- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks +++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks | |||
| @@ -2,5 +2,5 @@ | |||
| 2 | # long-description: Creates a partitioned SD card image. Boot files | 2 | # long-description: Creates a partitioned SD card image. Boot files |
| 3 | # are located in the first vfat partition. | 3 | # are located in the first vfat partition. |
| 4 | 4 | ||
| 5 | part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 | 5 | part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 |
| 6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 | 6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 |
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks index 95d7b97a60..3fb2c0e35f 100644 --- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks +++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks | |||
| @@ -2,7 +2,7 @@ | |||
| 2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
| 3 | # can directly dd to boot media. The selected bootloader is systemd-boot. | 3 | # can directly dd to boot media. The selected bootloader is systemd-boot. |
| 4 | 4 | ||
| 5 | part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid | 5 | part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid |
| 6 | 6 | ||
| 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
| 8 | 8 | ||
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py index 674ccfc244..b9e60cbe4e 100644 --- a/scripts/lib/wic/engine.py +++ b/scripts/lib/wic/engine.py | |||
| @@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, | |||
| 180 | os.makedirs(options.outdir) | 180 | os.makedirs(options.outdir) |
| 181 | 181 | ||
| 182 | pname = options.imager | 182 | pname = options.imager |
| 183 | # Don't support '-' in plugin names | ||
| 184 | pname = pname.replace("-", "_") | ||
| 183 | plugin_class = PluginMgr.get_plugins('imager').get(pname) | 185 | plugin_class = PluginMgr.get_plugins('imager').get(pname) |
| 184 | if not plugin_class: | 186 | if not plugin_class: |
| 185 | raise WicError('Unknown plugin: %s' % pname) | 187 | raise WicError('Unknown plugin: %s' % pname) |
| @@ -232,6 +234,16 @@ class Disk: | |||
| 232 | self._psector_size = None | 234 | self._psector_size = None |
| 233 | self._ptable_format = None | 235 | self._ptable_format = None |
| 234 | 236 | ||
| 237 | # define sector size | ||
| 238 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') | ||
| 239 | if sector_size_str is not None: | ||
| 240 | try: | ||
| 241 | self.sector_size = int(sector_size_str) | ||
| 242 | except ValueError: | ||
| 243 | self.sector_size = None | ||
| 244 | else: | ||
| 245 | self.sector_size = None | ||
| 246 | |||
| 235 | # find parted | 247 | # find parted |
| 236 | # read paths from $PATH environment variable | 248 | # read paths from $PATH environment variable |
| 237 | # if it fails, use hardcoded paths | 249 | # if it fails, use hardcoded paths |
| @@ -258,7 +270,13 @@ class Disk: | |||
| 258 | def get_partitions(self): | 270 | def get_partitions(self): |
| 259 | if self._partitions is None: | 271 | if self._partitions is None: |
| 260 | self._partitions = OrderedDict() | 272 | self._partitions = OrderedDict() |
| 261 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | 273 | |
| 274 | if self.sector_size is not None: | ||
| 275 | out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \ | ||
| 276 | (self.sector_size, self.parted, self.imagepath), True) | ||
| 277 | else: | ||
| 278 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | ||
| 279 | |||
| 262 | parttype = namedtuple("Part", "pnum start end size fstype") | 280 | parttype = namedtuple("Part", "pnum start end size fstype") |
| 263 | splitted = out.splitlines() | 281 | splitted = out.splitlines() |
| 264 | # skip over possible errors in exec_cmd output | 282 | # skip over possible errors in exec_cmd output |
| @@ -359,7 +377,7 @@ class Disk: | |||
| 359 | Remove files/dirs and their contents from the partition. | 377 | Remove files/dirs and their contents from the partition. |
| 360 | This only applies to ext* partition. | 378 | This only applies to ext* partition. |
| 361 | """ | 379 | """ |
| 362 | abs_path = re.sub('\/\/+', '/', path) | 380 | abs_path = re.sub(r'\/\/+', '/', path) |
| 363 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, | 381 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, |
| 364 | self._get_part_image(pnum), | 382 | self._get_part_image(pnum), |
| 365 | abs_path) | 383 | abs_path) |
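Taken together, the engine.py hunks make parted honor a non-default sector size. A minimal sketch of the resulting invocation, assuming exec_cmd from wic.misc and an already-located parted binary:

    from wic.misc import exec_cmd

    def parted_print(parted, imagepath, sector_size=None):
        # When WIC_SECTOR_SIZE parsed to an integer, export
        # PARTED_SECTOR_SIZE so parted reads the image with that sector
        # size (the command must run through a shell for the export to
        # apply); otherwise keep the plain invocation.
        if sector_size is not None:
            return exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print"
                            % (sector_size, parted, imagepath), True)
        return exec_cmd("%s -sm %s unit B print" % (parted, imagepath))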
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py index 163535e431..6b49a67de9 100644 --- a/scripts/lib/wic/help.py +++ b/scripts/lib/wic/help.py | |||
| @@ -544,18 +544,18 @@ DESCRIPTION | |||
| 544 | the --source param given to that partition. For example, if the | 544 | the --source param given to that partition. For example, if the |
| 545 | partition is set up like this: | 545 | partition is set up like this: |
| 546 | 546 | ||
| 547 | part /boot --source bootimg-pcbios ... | 547 | part /boot --source bootimg_pcbios ... |
| 548 | 548 | ||
| 549 | then the methods defined as class members of the plugin having the | 549 | then the methods defined as class members of the plugin having the |
| 550 | matching bootimg-pcbios .name class member would be used. | 550 | matching bootimg_pcbios .name class member would be used. |
| 551 | 551 | ||
| 552 | To be more concrete, here's the plugin definition that would match | 552 | To be more concrete, here's the plugin definition that would match |
| 553 | a '--source bootimg-pcbios' usage, along with an example method | 553 | a '--source bootimg_pcbios' usage, along with an example method |
| 554 | that would be called by the wic implementation when it needed to | 554 | that would be called by the wic implementation when it needed to |
| 555 | invoke an implementation-specific partition-preparation function: | 555 | invoke an implementation-specific partition-preparation function: |
| 556 | 556 | ||
| 557 | class BootimgPcbiosPlugin(SourcePlugin): | 557 | class BootimgPcbiosPlugin(SourcePlugin): |
| 558 | name = 'bootimg-pcbios' | 558 | name = 'bootimg_pcbios' |
| 559 | 559 | ||
| 560 | @classmethod | 560 | @classmethod |
| 561 | def do_prepare_partition(self, part, ...) | 561 | def do_prepare_partition(self, part, ...) |
| @@ -794,7 +794,7 @@ DESCRIPTION | |||
| 794 | 794 | ||
| 795 | Here is the content of test.wks: | 795 | Here is the content of test.wks: |
| 796 | 796 | ||
| 797 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 797 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
| 798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 | 798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 |
| 799 | 799 | ||
| 800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" | 800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" |
| @@ -916,6 +916,10 @@ DESCRIPTION | |||
| 916 | will create an empty partition. The --size parameter has | 916 | will create an empty partition. The --size parameter has |
| 917 | to be used to specify the size of the empty partition. | 917 | to be used to specify the size of the empty partition. |
| 918 | 918 | ||
| 919 | --sourceparams: This option is specific to wic. Supply additional | ||
| 920 | parameters to the source plugin in | ||
| 921 | key1=value1,key2 format. | ||
| 922 | |||
| 919 | --ondisk or --ondrive: Forces the partition to be created on | 923 | --ondisk or --ondrive: Forces the partition to be created on |
| 920 | a particular disk. | 924 | a particular disk. |
| 921 | 925 | ||
| @@ -932,6 +936,7 @@ DESCRIPTION | |||
| 932 | squashfs | 936 | squashfs |
| 933 | erofs | 937 | erofs |
| 934 | swap | 938 | swap |
| 939 | none | ||
| 935 | 940 | ||
| 936 | --fsoptions: Specifies a free-form string of options to be | 941 | --fsoptions: Specifies a free-form string of options to be |
| 937 | used when mounting the filesystem. This string | 942 | used when mounting the filesystem. This string |
| @@ -965,6 +970,14 @@ DESCRIPTION | |||
| 965 | to start a partition on an x KBytes | 970 | to start a partition on an x KBytes |
| 966 | boundary. | 971 | boundary. |
| 967 | 972 | ||
| 973 | --offset: This option is specific to wic. It places the partition | ||
| 974 | at exactly the specified offset. If the partition cannot be | ||
| 975 | placed at the specified offset, the image build will fail. | ||
| 976 | Specify an integer value, optionally followed by one of the | ||
| 977 | units s/S for 512-byte sectors, k/K for kibibytes, M for | ||
| 978 | mebibytes and G for gibibytes. The default unit if none is | ||
| 979 | given is k. | ||
| 980 | |||
| 968 | --no-table: This option is specific to wic. Space will be | 981 | --no-table: This option is specific to wic. Space will be |
| 969 | reserved for the partition and it will be | 982 | reserved for the partition and it will be |
| 970 | populated but it will not be added to the | 983 | populated but it will not be added to the |
| @@ -1000,12 +1013,20 @@ DESCRIPTION | |||
| 1000 | --no-fstab-update: This option is specific to wic. It does not update the | 1013 | --no-fstab-update: This option is specific to wic. It does not update the |
| 1001 | '/etc/fstab' stock file for the given partition. | 1014 | '/etc/fstab' stock file for the given partition. |
| 1002 | 1015 | ||
| 1003 | --extra-space: This option is specific to wic. It adds extra | 1016 | --extra-filesystem-space: This option is specific to wic. It adds extra |
| 1004 | space after the space filled by the content | 1017 | space after the space filled by the content |
| 1005 | of the partition. The final size can go | 1018 | of the partition. The final size can go |
| 1006 | beyond the size specified by --size. | 1019 | beyond the size specified by --size. |
| 1007 | By default, 10MB. This option cannot be used | 1020 | By default, 10MB. This option cannot be used |
| 1008 | with --fixed-size option. | 1021 | with --fixed-size option. |
| 1022 | |||
| 1023 | --extra-partition-space: This option is specific to wic. It adds extra | ||
| 1024 | empty space after the space filled by the | ||
| 1025 | filesystem. With --fixed-size, the extra | ||
| 1026 | partition space is removed from the filesystem | ||
| 1027 | size. Otherwise (with or without --size flag), | ||
| 1028 | the extra partition space is added to the final | ||
| 1029 | partition size. The default value is 0MB. | ||
| 1009 | 1030 | ||
| 1010 | --overhead-factor: This option is specific to wic. The | 1031 | --overhead-factor: This option is specific to wic. The |
| 1011 | size of the partition is multiplied by | 1032 | size of the partition is multiplied by |
| @@ -1045,6 +1066,18 @@ DESCRIPTION | |||
| 1045 | not take effect when --mkfs-extraopts is used. This should be taken into | 1066 | not take effect when --mkfs-extraopts is used. This should be taken into |
| 1046 | account when using --mkfs-extraopts. | 1067 | account when using --mkfs-extraopts. |
| 1047 | 1068 | ||
| 1069 | --type: This option is specific to wic. Valid values are 'primary', | ||
| 1070 | 'logical'. For msdos partition tables, this option specifies | ||
| 1071 | the partition type. | ||
| 1072 | |||
| 1073 | --hidden: This option is specific to wic. This option sets the | ||
| 1074 | RequiredPartition bit (bit 0) on GPT partitions. | ||
| 1075 | |||
| 1076 | --mbr: This option is specific to wic. It is used with the | ||
| 1077 | gpt-hybrid partition table format, which uses both a GPT | ||
| 1078 | partition table and an MBR header. Partitions with this flag | ||
| 1079 | will be included in the MBR header. | ||
| 1080 | |||
| 1048 | * bootloader | 1081 | * bootloader |
| 1049 | 1082 | ||
| 1050 | This command allows the user to specify various bootloader | 1083 | This command allows the user to specify various bootloader |
| @@ -1063,6 +1096,13 @@ DESCRIPTION | |||
| 1063 | file. Using this option will override any other | 1096 | file. Using this option will override any other |
| 1064 | bootloader option. | 1097 | bootloader option. |
| 1065 | 1098 | ||
| 1099 | --ptable: Specifies the partition table format. Valid values are | ||
| 1100 | 'msdos', 'gpt', 'gpt-hybrid'. | ||
| 1101 | |||
| 1102 | --source: Specifies the source plugin. If not specified, the | ||
| 1103 | --source value will be copied from the partition that has | ||
| 1104 | /boot as mountpoint. | ||
| 1105 | |||
| 1066 | Note that bootloader functionality and boot partitions are | 1106 | Note that bootloader functionality and boot partitions are |
| 1067 | implemented by the various --source plugins that implement | 1107 | implemented by the various --source plugins that implement |
| 1068 | bootloader functionality; the bootloader command essentially | 1108 | bootloader functionality; the bootloader command essentially |
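The plugin skeleton described by the help text above, spelled out as a runnable sketch with the new underscore naming (the method body is illustrative only; the signature follows the SourcePlugin methods shown elsewhere in this patch):

    from wic.pluginbase import SourcePlugin

    class BootimgPcbiosPlugin(SourcePlugin):
        # matched against: part /boot --source bootimg_pcbios ...
        name = 'bootimg_pcbios'

        @classmethod
        def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
                                 oe_builddir, bootimg_dir, kernel_dir,
                                 rootfs_dir, native_sysroot):
            # populate the partition content here
            pass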
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py index 7ef3dc83dd..a762d3b6cf 100644 --- a/scripts/lib/wic/ksparser.py +++ b/scripts/lib/wic/ksparser.py | |||
| @@ -132,7 +132,7 @@ def systemidtype(arg): | |||
| 132 | class KickStart(): | 132 | class KickStart(): |
| 133 | """Kickstart parser implementation.""" | 133 | """Kickstart parser implementation.""" |
| 134 | 134 | ||
| 135 | DEFAULT_EXTRA_SPACE = 10*1024 | 135 | DEFAULT_EXTRA_FILESYSTEM_SPACE = 10*1024 |
| 136 | DEFAULT_OVERHEAD_FACTOR = 1.3 | 136 | DEFAULT_OVERHEAD_FACTOR = 1.3 |
| 137 | 137 | ||
| 138 | def __init__(self, confpath): | 138 | def __init__(self, confpath): |
| @@ -153,7 +153,8 @@ class KickStart(): | |||
| 153 | part.add_argument('--exclude-path', nargs='+') | 153 | part.add_argument('--exclude-path', nargs='+') |
| 154 | part.add_argument('--include-path', nargs='+', action='append') | 154 | part.add_argument('--include-path', nargs='+', action='append') |
| 155 | part.add_argument('--change-directory') | 155 | part.add_argument('--change-directory') |
| 156 | part.add_argument("--extra-space", type=sizetype("M")) | 156 | part.add_argument('--extra-filesystem-space', '--extra-space', type=sizetype("M")) |
| 157 | part.add_argument('--extra-partition-space', type=sizetype("M")) | ||
| 157 | part.add_argument('--fsoptions', dest='fsopts') | 158 | part.add_argument('--fsoptions', dest='fsopts') |
| 158 | part.add_argument('--fspassno', dest='fspassno') | 159 | part.add_argument('--fspassno', dest='fspassno') |
| 159 | part.add_argument('--fstype', default='vfat', | 160 | part.add_argument('--fstype', default='vfat', |
| @@ -174,9 +175,9 @@ class KickStart(): | |||
| 174 | part.add_argument('--hidden', action='store_true') | 175 | part.add_argument('--hidden', action='store_true') |
| 175 | 176 | ||
| 176 | # --size and --fixed-size cannot be specified together; options | 177 | # --size and --fixed-size cannot be specified together; options |
| 177 | # --extra-space and --overhead-factor should also raise a parser | 178 | # --extra-filesystem-space and --overhead-factor should also raise a |
| 178 | # error, but since nesting mutually exclusive groups does not work, | 179 | # parser error, but since nesting mutually exclusive groups does not work, |
| 179 | # --extra-space/--overhead-factor are handled later | 180 | # --extra-filesystem-space/--overhead-factor are handled later |
| 180 | sizeexcl = part.add_mutually_exclusive_group() | 181 | sizeexcl = part.add_mutually_exclusive_group() |
| 181 | sizeexcl.add_argument('--size', type=sizetype("M"), default=0) | 182 | sizeexcl.add_argument('--size', type=sizetype("M"), default=0) |
| 182 | sizeexcl.add_argument('--fixed-size', type=sizetype("M"), default=0) | 183 | sizeexcl.add_argument('--fixed-size', type=sizetype("M"), default=0) |
| @@ -259,14 +260,17 @@ class KickStart(): | |||
| 259 | err = "%s:%d: Must set the label with --label" \ | 260 | err = "%s:%d: Must set the label with --label" \ |
| 260 | % (confpath, lineno) | 261 | % (confpath, lineno) |
| 261 | raise KickStartError(err) | 262 | raise KickStartError(err) |
| 263 | if not parsed.extra_partition_space: | ||
| 264 | parsed.extra_partition_space = 0 | ||
| 262 | # using ArgumentParser one cannot easily tell if option | 265 | # using ArgumentParser one cannot easily tell if option |
| 263 | # was passed as argument, if said option has a default | 266 | # was passed as argument, if said option has a default |
| 264 | # value; --overhead-factor/--extra-space cannot be used | 267 | # value; --overhead-factor/--extra-filesystem-space |
| 265 | # with --fixed-size, so at least detect when these were | 268 | # cannot be used with --fixed-size, so at least detect |
| 266 | # passed with non-0 values ... | 269 | # when these were passed with non-0 values ... |
| 267 | if parsed.fixed_size: | 270 | if parsed.fixed_size: |
| 268 | if parsed.overhead_factor or parsed.extra_space: | 271 | if parsed.overhead_factor or parsed.extra_filesystem_space: |
| 269 | err = "%s:%d: arguments --overhead-factor and --extra-space not "\ | 272 | err = "%s:%d: arguments --overhead-factor and "\ |
| 273 | "--extra-filesystem-space not "\ | ||
| 270 | "allowed with argument --fixed-size" \ | 274 | "allowed with argument --fixed-size" \ |
| 271 | % (confpath, lineno) | 275 | % (confpath, lineno) |
| 272 | raise KickStartError(err) | 276 | raise KickStartError(err) |
| @@ -275,10 +279,10 @@ class KickStart(): | |||
| 275 | # --fixed-size iff given option was not used | 279 | # --fixed-size iff given option was not used |
| 276 | # (again, one cannot tell if option was passed but | 280 | # (again, one cannot tell if option was passed but |
| 277 | # with value equal to 0) | 281 | # with value equal to 0) |
| 278 | if '--overhead-factor' not in line_args: | 282 | if not parsed.overhead_factor: |
| 279 | parsed.overhead_factor = self.DEFAULT_OVERHEAD_FACTOR | 283 | parsed.overhead_factor = self.DEFAULT_OVERHEAD_FACTOR |
| 280 | if '--extra-space' not in line_args: | 284 | if not parsed.extra_filesystem_space: |
| 281 | parsed.extra_space = self.DEFAULT_EXTRA_SPACE | 285 | parsed.extra_filesystem_space = self.DEFAULT_EXTRA_FILESYSTEM_SPACE |
| 282 | 286 | ||
| 283 | self.partnum += 1 | 287 | self.partnum += 1 |
| 284 | self.partitions.append(Partition(parsed, self.partnum)) | 288 | self.partitions.append(Partition(parsed, self.partnum)) |
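As the comments above note, argparse cannot distinguish an option left at its default from one explicitly passed as 0, so the parser treats a zero value as "not given" and substitutes the defaults after parsing. A condensed sketch of that post-parse logic (apply_size_defaults is a hypothetical helper; the constants mirror the KickStart class):

    DEFAULT_EXTRA_FILESYSTEM_SPACE = 10 * 1024  # KiB
    DEFAULT_OVERHEAD_FACTOR = 1.3

    def apply_size_defaults(parsed):
        if parsed.fixed_size:
            if parsed.overhead_factor or parsed.extra_filesystem_space:
                raise ValueError("--overhead-factor and "
                                 "--extra-filesystem-space are not "
                                 "allowed with --fixed-size")
        else:
            # zero is indistinguishable from "not passed": use defaults
            parsed.overhead_factor = (parsed.overhead_factor
                                      or DEFAULT_OVERHEAD_FACTOR)
            parsed.extra_filesystem_space = (parsed.extra_filesystem_space
                                             or DEFAULT_EXTRA_FILESYSTEM_SPACE)
        return parsed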
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py index 795707ec5d..531ac6eb3d 100644 --- a/scripts/lib/wic/partition.py +++ b/scripts/lib/wic/partition.py | |||
| @@ -28,7 +28,8 @@ class Partition(): | |||
| 28 | self.align = args.align | 28 | self.align = args.align |
| 29 | self.disk = args.disk | 29 | self.disk = args.disk |
| 30 | self.device = None | 30 | self.device = None |
| 31 | self.extra_space = args.extra_space | 31 | self.extra_filesystem_space = args.extra_filesystem_space |
| 32 | self.extra_partition_space = args.extra_partition_space | ||
| 32 | self.exclude_path = args.exclude_path | 33 | self.exclude_path = args.exclude_path |
| 33 | self.include_path = args.include_path | 34 | self.include_path = args.include_path |
| 34 | self.change_directory = args.change_directory | 35 | self.change_directory = args.change_directory |
| @@ -91,21 +92,20 @@ class Partition(): | |||
| 91 | def get_rootfs_size(self, actual_rootfs_size=0): | 92 | def get_rootfs_size(self, actual_rootfs_size=0): |
| 92 | """ | 93 | """ |
| 93 | Calculate the required size of rootfs taking into consideration | 94 | Calculate the required size of rootfs taking into consideration |
| 94 | --size/--fixed-size flags as well as overhead and extra space, as | 95 | --size/--fixed-size and --extra-partition-space flags as well as overhead |
| 95 | specified in kickstart file. Raises an error if the | 96 | and extra space, as specified in kickstart file. Raises an error |
| 96 | `actual_rootfs_size` is larger than fixed-size rootfs. | 97 | if the `actual_rootfs_size` is larger than fixed-size rootfs. |
| 97 | |||
| 98 | """ | 98 | """ |
| 99 | if self.fixed_size: | 99 | if self.fixed_size: |
| 100 | rootfs_size = self.fixed_size | 100 | rootfs_size = self.fixed_size - self.extra_partition_space |
| 101 | if actual_rootfs_size > rootfs_size: | 101 | if actual_rootfs_size > rootfs_size: |
| 102 | raise WicError("Actual rootfs size (%d kB) is larger than " | 102 | raise WicError("Actual rootfs size (%d kB) is larger than " |
| 103 | "allowed size %d kB" % | 103 | "allowed size %d kB" % |
| 104 | (actual_rootfs_size, rootfs_size)) | 104 | (actual_rootfs_size, rootfs_size)) |
| 105 | else: | 105 | else: |
| 106 | extra_blocks = self.get_extra_block_count(actual_rootfs_size) | 106 | extra_blocks = self.get_extra_block_count(actual_rootfs_size) |
| 107 | if extra_blocks < self.extra_space: | 107 | if extra_blocks < self.extra_filesystem_space: |
| 108 | extra_blocks = self.extra_space | 108 | extra_blocks = self.extra_filesystem_space |
| 109 | 109 | ||
| 110 | rootfs_size = actual_rootfs_size + extra_blocks | 110 | rootfs_size = actual_rootfs_size + extra_blocks |
| 111 | rootfs_size = int(rootfs_size * self.overhead_factor) | 111 | rootfs_size = int(rootfs_size * self.overhead_factor) |
| @@ -119,10 +119,18 @@ class Partition(): | |||
| 119 | def disk_size(self): | 119 | def disk_size(self): |
| 120 | """ | 120 | """ |
| 121 | Obtain on-disk size of partition taking into consideration | 121 | Obtain on-disk size of partition taking into consideration |
| 122 | --size/--fixed-size options. | 122 | --size/--fixed-size and --extra-partition-space options. |
| 123 | |||
| 124 | """ | ||
| 125 | return self.fixed_size if self.fixed_size else self.size + self.extra_partition_space | ||
| 123 | 126 | ||
| 127 | @property | ||
| 128 | def fs_size(self): | ||
| 129 | """ | ||
| 130 | Obtain on-disk size of filesystem inside the partition taking into | ||
| 131 | consideration --size/--fixed-size and --extra-partition-space options. | ||
| 124 | """ | 132 | """ |
| 125 | return self.fixed_size if self.fixed_size else self.size | 133 | return self.fixed_size - self.extra_partition_space if self.fixed_size else self.size |
| 126 | 134 | ||
| 127 | def prepare(self, creator, cr_workdir, oe_builddir, rootfs_dir, | 135 | def prepare(self, creator, cr_workdir, oe_builddir, rootfs_dir, |
| 128 | bootimg_dir, kernel_dir, native_sysroot, updated_fstab_path): | 136 | bootimg_dir, kernel_dir, native_sysroot, updated_fstab_path): |
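A worked example of the new disk_size/fs_size split (sizes in KiB; P is a hypothetical stand-in for a parsed partition): with --fixed-size the partition size stays fixed and the filesystem shrinks by --extra-partition-space, while with --size the extra space is appended on top of the filesystem.

    class P:
        def __init__(self, size=0, fixed_size=0, extra_partition_space=0):
            self.size = size
            self.fixed_size = fixed_size
            self.extra_partition_space = extra_partition_space
        @property
        def disk_size(self):
            return self.fixed_size if self.fixed_size \
                else self.size + self.extra_partition_space
        @property
        def fs_size(self):
            return self.fixed_size - self.extra_partition_space \
                if self.fixed_size else self.size

    p = P(fixed_size=1024, extra_partition_space=64)
    assert (p.disk_size, p.fs_size) == (1024, 960)  # partition fixed, fs shrunk
    q = P(size=512, extra_partition_space=64)
    assert (q.disk_size, q.fs_size) == (576, 512)   # extra space appended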
| @@ -164,10 +172,13 @@ class Partition(): | |||
| 164 | 172 | ||
| 165 | plugins = PluginMgr.get_plugins('source') | 173 | plugins = PluginMgr.get_plugins('source') |
| 166 | 174 | ||
| 175 | # Don't support '-' in plugin names | ||
| 176 | self.source = self.source.replace("-", "_") | ||
| 177 | |||
| 167 | if self.source not in plugins: | 178 | if self.source not in plugins: |
| 168 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" | 179 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" |
| 169 | "See 'wic list source-plugins' for a list of available" | 180 | "See 'wic list source-plugins' for a list of available" |
| 170 | " --sources.\n\tSee 'wic help source-plugins' for " | 181 | " --sources.\n\tSee 'wic help plugins' for " |
| 171 | "details on adding a new source plugin." % | 182 | "details on adding a new source plugin." % |
| 172 | (self.source, self.mountpoint)) | 183 | (self.source, self.mountpoint)) |
| 173 | 184 | ||
| @@ -178,7 +189,7 @@ class Partition(): | |||
| 178 | splitted = self.sourceparams.split(',') | 189 | splitted = self.sourceparams.split(',') |
| 179 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) | 190 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) |
| 180 | 191 | ||
| 181 | plugin = PluginMgr.get_plugins('source')[self.source] | 192 | plugin = plugins[self.source] |
| 182 | plugin.do_configure_partition(self, srcparams_dict, creator, | 193 | plugin.do_configure_partition(self, srcparams_dict, creator, |
| 183 | cr_workdir, oe_builddir, bootimg_dir, | 194 | cr_workdir, oe_builddir, bootimg_dir, |
| 184 | kernel_dir, native_sysroot) | 195 | kernel_dir, native_sysroot) |
| @@ -199,10 +210,10 @@ class Partition(): | |||
| 199 | "This a bug in source plugin %s and needs to be fixed." % | 210 | "This a bug in source plugin %s and needs to be fixed." % |
| 200 | (self.mountpoint, self.source)) | 211 | (self.mountpoint, self.source)) |
| 201 | 212 | ||
| 202 | if self.fixed_size and self.size > self.fixed_size: | 213 | if self.fixed_size and self.size + self.extra_partition_space > self.fixed_size: |
| 203 | raise WicError("File system image of partition %s is " | 214 | raise WicError("File system image of partition %s is " |
| 204 | "larger (%d kB) than its allowed size %d kB" % | 215 | "larger (%d kB + %d kB extra part space) than its allowed size %d kB" % |
| 205 | (self.mountpoint, self.size, self.fixed_size)) | 216 | (self.mountpoint, self.size, self.extra_partition_space, self.fixed_size)) |
| 206 | 217 | ||
| 207 | def prepare_rootfs(self, cr_workdir, oe_builddir, rootfs_dir, | 218 | def prepare_rootfs(self, cr_workdir, oe_builddir, rootfs_dir, |
| 208 | native_sysroot, real_rootfs = True, pseudo_dir = None): | 219 | native_sysroot, real_rootfs = True, pseudo_dir = None): |
| @@ -222,19 +233,19 @@ class Partition(): | |||
| 222 | if (pseudo_dir): | 233 | if (pseudo_dir): |
| 223 | # Canonicalize the ignore paths. This corresponds to | 234 | # Canonicalize the ignore paths. This corresponds to |
| 224 | # calling oe.path.canonicalize(), which is used in bitbake.conf. | 235 | # calling oe.path.canonicalize(), which is used in bitbake.conf. |
| 225 | ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",") | 236 | include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",") |
| 226 | canonical_paths = [] | 237 | canonical_paths = [] |
| 227 | for path in ignore_paths: | 238 | for path in include_paths: |
| 228 | if "$" not in path: | 239 | if "$" not in path: |
| 229 | trailing_slash = path.endswith("/") and "/" or "" | 240 | trailing_slash = path.endswith("/") and "/" or "" |
| 230 | canonical_paths.append(os.path.realpath(path) + trailing_slash) | 241 | canonical_paths.append(os.path.realpath(path) + trailing_slash) |
| 231 | ignore_paths = ",".join(canonical_paths) | 242 | include_paths = ",".join(canonical_paths) |
| 232 | 243 | ||
| 233 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix | 244 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix |
| 234 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir | 245 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir |
| 235 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir | 246 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir |
| 236 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" | 247 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" |
| 237 | pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths | 248 | pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths |
| 238 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") | 249 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") |
| 239 | else: | 250 | else: |
| 240 | pseudo = None | 251 | pseudo = None |
| @@ -244,7 +255,7 @@ class Partition(): | |||
| 244 | # from bitbake variable | 255 | # from bitbake variable |
| 245 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') | 256 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') |
| 246 | rdir = get_bitbake_var('IMAGE_ROOTFS') | 257 | rdir = get_bitbake_var('IMAGE_ROOTFS') |
| 247 | if rsize_bb and rdir == rootfs_dir: | 258 | if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")): |
| 248 | # Bitbake variable ROOTFS_SIZE is calculated in | 259 | # Bitbake variable ROOTFS_SIZE is calculated in |
| 249 | # Image._get_rootfs_size method from meta/lib/oe/image.py | 260 | # Image._get_rootfs_size method from meta/lib/oe/image.py |
| 250 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, | 261 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, |
| @@ -284,19 +295,8 @@ class Partition(): | |||
| 284 | 295 | ||
| 285 | extraopts = self.mkfs_extraopts or "-F -i 8192" | 296 | extraopts = self.mkfs_extraopts or "-F -i 8192" |
| 286 | 297 | ||
| 287 | if os.getenv('SOURCE_DATE_EPOCH'): | 298 | # use hash_seed to generate reproducible ext4 images |
| 288 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | 299 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo) |
| 289 | if pseudo: | ||
| 290 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
| 291 | else: | ||
| 292 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
| 293 | |||
| 294 | # Set hash_seed to generate deterministic directory indexes | ||
| 295 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
| 296 | if self.fsuuid: | ||
| 297 | namespace = uuid.UUID(self.fsuuid) | ||
| 298 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
| 299 | extraopts += " -E hash_seed=%s" % hash_seed | ||
| 300 | 300 | ||
| 301 | label_str = "" | 301 | label_str = "" |
| 302 | if self.label: | 302 | if self.label: |
| @@ -344,6 +344,23 @@ class Partition(): | |||
| 344 | 344 | ||
| 345 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 345 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
| 346 | 346 | ||
| 347 | def get_hash_seed_ext4(self, extraopts, pseudo): | ||
| 348 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
| 349 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | ||
| 350 | if pseudo: | ||
| 351 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
| 352 | else: | ||
| 353 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
| 354 | |||
| 355 | # Set hash_seed to generate deterministic directory indexes | ||
| 356 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
| 357 | if self.fsuuid: | ||
| 358 | namespace = uuid.UUID(self.fsuuid) | ||
| 359 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
| 360 | extraopts += " -E hash_seed=%s" % hash_seed | ||
| 361 | |||
| 362 | return (extraopts, pseudo) | ||
| 363 | |||
| 347 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, | 364 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, |
| 348 | native_sysroot, pseudo): | 365 | native_sysroot, pseudo): |
| 349 | """ | 366 | """ |
| @@ -431,19 +448,22 @@ class Partition(): | |||
| 431 | """ | 448 | """ |
| 432 | Prepare an empty ext2/3/4 partition. | 449 | Prepare an empty ext2/3/4 partition. |
| 433 | """ | 450 | """ |
| 434 | size = self.disk_size | 451 | size = self.fs_size |
| 435 | with open(rootfs, 'w') as sparse: | 452 | with open(rootfs, 'w') as sparse: |
| 436 | os.ftruncate(sparse.fileno(), size * 1024) | 453 | os.ftruncate(sparse.fileno(), size * 1024) |
| 437 | 454 | ||
| 438 | extraopts = self.mkfs_extraopts or "-i 8192" | 455 | extraopts = self.mkfs_extraopts or "-i 8192" |
| 439 | 456 | ||
| 457 | # use hash_seed to generate reproducible ext4 images | ||
| 458 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None) | ||
| 459 | |||
| 440 | label_str = "" | 460 | label_str = "" |
| 441 | if self.label: | 461 | if self.label: |
| 442 | label_str = "-L %s" % self.label | 462 | label_str = "-L %s" % self.label |
| 443 | 463 | ||
| 444 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ | 464 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ |
| 445 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) | 465 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) |
| 446 | exec_native_cmd(mkfs_cmd, native_sysroot) | 466 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
| 447 | 467 | ||
| 448 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 468 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
| 449 | 469 | ||
| @@ -452,7 +472,7 @@ class Partition(): | |||
| 452 | """ | 472 | """ |
| 453 | Prepare an empty btrfs partition. | 473 | Prepare an empty btrfs partition. |
| 454 | """ | 474 | """ |
| 455 | size = self.disk_size | 475 | size = self.fs_size |
| 456 | with open(rootfs, 'w') as sparse: | 476 | with open(rootfs, 'w') as sparse: |
| 457 | os.ftruncate(sparse.fileno(), size * 1024) | 477 | os.ftruncate(sparse.fileno(), size * 1024) |
| 458 | 478 | ||
| @@ -470,7 +490,7 @@ class Partition(): | |||
| 470 | """ | 490 | """ |
| 471 | Prepare an empty vfat partition. | 491 | Prepare an empty vfat partition. |
| 472 | """ | 492 | """ |
| 473 | blocks = self.disk_size | 493 | blocks = self.fs_size |
| 474 | 494 | ||
| 475 | label_str = "-n boot" | 495 | label_str = "-n boot" |
| 476 | if self.label: | 496 | if self.label: |
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py index b64568339b..640da292d3 100644 --- a/scripts/lib/wic/pluginbase.py +++ b/scripts/lib/wic/pluginbase.py | |||
| @@ -44,7 +44,7 @@ class PluginMgr: | |||
| 44 | path = os.path.join(layer_path, script_plugin_dir) | 44 | path = os.path.join(layer_path, script_plugin_dir) |
| 45 | path = os.path.abspath(os.path.expanduser(path)) | 45 | path = os.path.abspath(os.path.expanduser(path)) |
| 46 | if path not in cls._plugin_dirs and os.path.isdir(path): | 46 | if path not in cls._plugin_dirs and os.path.isdir(path): |
| 47 | cls._plugin_dirs.insert(0, path) | 47 | cls._plugin_dirs.append(path) |
| 48 | 48 | ||
| 49 | if ptype not in PLUGINS: | 49 | if ptype not in PLUGINS: |
| 50 | # load all ptype plugins | 50 | # load all ptype plugins |
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py index a1d152659b..6e1f1c8cba 100644 --- a/scripts/lib/wic/plugins/imager/direct.py +++ b/scripts/lib/wic/plugins/imager/direct.py | |||
| @@ -203,6 +203,8 @@ class DirectPlugin(ImagerPlugin): | |||
| 203 | source_plugin = self.ks.bootloader.source | 203 | source_plugin = self.ks.bootloader.source |
| 204 | disk_name = self.parts[0].disk | 204 | disk_name = self.parts[0].disk |
| 205 | if source_plugin: | 205 | if source_plugin: |
| 206 | # Don't support '-' in plugin names | ||
| 207 | source_plugin = source_plugin.replace("-", "_") | ||
| 206 | plugin = PluginMgr.get_plugins('source')[source_plugin] | 208 | plugin = PluginMgr.get_plugins('source')[source_plugin] |
| 207 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, | 209 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, |
| 208 | self.oe_builddir, self.bootimg_dir, | 210 | self.oe_builddir, self.bootimg_dir, |
| @@ -321,7 +323,15 @@ class PartitionedImage(): | |||
| 321 | self.partitions = partitions | 323 | self.partitions = partitions |
| 322 | self.partimages = [] | 324 | self.partimages = [] |
| 323 | # Size of a sector used in calculations | 325 | # Size of a sector used in calculations |
| 324 | self.sector_size = SECTOR_SIZE | 326 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') |
| 327 | if sector_size_str is not None: | ||
| 328 | try: | ||
| 329 | self.sector_size = int(sector_size_str) | ||
| 330 | except ValueError: | ||
| 331 | self.sector_size = SECTOR_SIZE | ||
| 332 | else: | ||
| 333 | self.sector_size = SECTOR_SIZE | ||
| 334 | |||
| 325 | self.native_sysroot = native_sysroot | 335 | self.native_sysroot = native_sysroot |
| 326 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) | 336 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) |
| 327 | self.extra_space = extra_space | 337 | self.extra_space = extra_space |
| @@ -508,7 +518,8 @@ class PartitionedImage(): | |||
| 508 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", | 518 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", |
| 509 | parttype, start, end, size) | 519 | parttype, start, end, size) |
| 510 | 520 | ||
| 511 | cmd = "parted -s %s unit s mkpart %s" % (device, parttype) | 521 | cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \ |
| 522 | (self.sector_size, device, parttype) | ||
| 512 | if fstype: | 523 | if fstype: |
| 513 | cmd += " %s" % fstype | 524 | cmd += " %s" % fstype |
| 514 | cmd += " %d %d" % (start, end) | 525 | cmd += " %d %d" % (start, end) |
| @@ -527,8 +538,8 @@ class PartitionedImage(): | |||
| 527 | os.ftruncate(sparse.fileno(), min_size) | 538 | os.ftruncate(sparse.fileno(), min_size) |
| 528 | 539 | ||
| 529 | logger.debug("Initializing partition table for %s", device) | 540 | logger.debug("Initializing partition table for %s", device) |
| 530 | exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format), | 541 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" % |
| 531 | self.native_sysroot) | 542 | (self.sector_size, device, ptable_format), self.native_sysroot) |
| 532 | 543 | ||
| 533 | def _write_disk_guid(self): | 544 | def _write_disk_guid(self): |
| 534 | if self.ptable_format in ('gpt', 'gpt-hybrid'): | 545 | if self.ptable_format in ('gpt', 'gpt-hybrid'): |
| @@ -538,7 +549,8 @@ class PartitionedImage(): | |||
| 538 | self.disk_guid = uuid.uuid4() | 549 | self.disk_guid = uuid.uuid4() |
| 539 | 550 | ||
| 540 | logger.debug("Set disk guid %s", self.disk_guid) | 551 | logger.debug("Set disk guid %s", self.disk_guid) |
| 541 | sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid) | 552 | sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \ |
| 553 | (self.sector_size, self.path, self.disk_guid) | ||
| 542 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) | 554 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) |
| 543 | 555 | ||
| 544 | def create(self): | 556 | def create(self): |
| @@ -613,45 +625,44 @@ class PartitionedImage(): | |||
| 613 | partition_label = part.part_name if part.part_name else part.label | 625 | partition_label = part.part_name if part.part_name else part.label |
| 614 | logger.debug("partition %d: set name to %s", | 626 | logger.debug("partition %d: set name to %s", |
| 615 | part.num, partition_label) | 627 | part.num, partition_label) |
| 616 | exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ | 628 | exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \ |
| 617 | (part.num, partition_label, | 629 | (self.sector_size, self.path, part.num, |
| 618 | self.path), self.native_sysroot) | 630 | partition_label), self.native_sysroot) |
| 619 | |||
| 620 | if part.part_type: | 631 | if part.part_type: |
| 621 | logger.debug("partition %d: set type UID to %s", | 632 | logger.debug("partition %d: set type UID to %s", |
| 622 | part.num, part.part_type) | 633 | part.num, part.part_type) |
| 623 | exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ | 634 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \ |
| 624 | (part.num, part.part_type, | 635 | (self.sector_size, self.path, part.num, |
| 625 | self.path), self.native_sysroot) | 636 | part.part_type), self.native_sysroot) |
| 626 | 637 | ||
| 627 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): | 638 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): |
| 628 | logger.debug("partition %d: set UUID to %s", | 639 | logger.debug("partition %d: set UUID to %s", |
| 629 | part.num, part.uuid) | 640 | part.num, part.uuid) |
| 630 | exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ | 641 | exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \ |
| 631 | (part.num, part.uuid, self.path), | 642 | (self.sector_size, self.path, part.num, part.uuid), |
| 632 | self.native_sysroot) | 643 | self.native_sysroot) |
| 633 | 644 | ||
| 634 | if part.active: | 645 | if part.active: |
| 635 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" | 646 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" |
| 636 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", | 647 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", |
| 637 | flag_name, part.num, self.path) | 648 | flag_name, part.num, self.path) |
| 638 | exec_native_cmd("parted -s %s set %d %s on" % \ | 649 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
| 639 | (self.path, part.num, flag_name), | 650 | (self.sector_size, self.path, part.num, flag_name), |
| 640 | self.native_sysroot) | 651 | self.native_sysroot) |
| 641 | if self.ptable_format == 'gpt-hybrid' and part.mbr: | 652 | if self.ptable_format == 'gpt-hybrid' and part.mbr: |
| 642 | exec_native_cmd("parted -s %s set %d %s on" % \ | 653 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
| 643 | (mbr_path, hybrid_mbr_part_num, "boot"), | 654 | (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"), |
| 644 | self.native_sysroot) | 655 | self.native_sysroot) |
| 645 | if part.system_id: | 656 | if part.system_id: |
| 646 | exec_native_cmd("sfdisk --part-type %s %s %s" % \ | 657 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \ |
| 647 | (self.path, part.num, part.system_id), | 658 | (self.sector_size, self.path, part.num, part.system_id), |
| 648 | self.native_sysroot) | 659 | self.native_sysroot) |
| 649 | 660 | ||
| 650 | if part.hidden and self.ptable_format == "gpt": | 661 | if part.hidden and self.ptable_format == "gpt": |
| 651 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", | 662 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", |
| 652 | part.num, self.path) | 663 | part.num, self.path) |
| 653 | exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \ | 664 | exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \ |
| 654 | (self.path, part.num), | 665 | (self.sector_size, self.path, part.num), |
| 655 | self.native_sysroot) | 666 | self.native_sysroot) |
| 656 | 667 | ||
| 657 | if self.ptable_format == "gpt-hybrid": | 668 | if self.ptable_format == "gpt-hybrid": |
| @@ -664,7 +675,8 @@ class PartitionedImage(): | |||
| 664 | # create with an arbitrary type, then change it to the correct type | 675 | # create with an arbitrary type, then change it to the correct type |
| 665 | # with sfdisk | 676 | # with sfdisk |
| 666 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) | 677 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) |
| 667 | exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num), | 678 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \ |
| 679 | (self.sector_size, mbr_path, hybrid_mbr_part_num), | ||
| 668 | self.native_sysroot) | 680 | self.native_sysroot) |
| 669 | 681 | ||
| 670 | # Copy hybrid MBR | 682 | # Copy hybrid MBR |
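direct.py now pins the sector size on every partition-table edit, replacing the sgdisk calls with sfdisk and exporting PARTED_SECTOR_SIZE for parted. A sketch of the command strings produced for a hypothetical 4096-byte-sector image (all values illustrative):

    sector_size, path, num = 4096, "image.wic", 1
    part_uuid = "11111111-2222-3333-4444-555555555555"  # illustrative

    cmds = [
        "sfdisk --sector-size %s --part-label %s %d %s"
        % (sector_size, path, num, "boot"),
        "sfdisk --sector-size %s --part-uuid %s %d %s"
        % (sector_size, path, num, part_uuid),
        "export PARTED_SECTOR_SIZE=%d; parted -s %s set %d boot on"
        % (sector_size, path, num),
    ]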
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg-pcbios.py deleted file mode 100644 index a207a83530..0000000000 --- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py +++ /dev/null | |||
| @@ -1,209 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright (c) 2014, Intel Corporation. | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # DESCRIPTION | ||
| 7 | # This implements the 'bootimg-pcbios' source plugin class for 'wic' | ||
| 8 | # | ||
| 9 | # AUTHORS | ||
| 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | ||
| 11 | # | ||
| 12 | |||
| 13 | import logging | ||
| 14 | import os | ||
| 15 | import re | ||
| 16 | |||
| 17 | from wic import WicError | ||
| 18 | from wic.engine import get_custom_config | ||
| 19 | from wic.pluginbase import SourcePlugin | ||
| 20 | from wic.misc import (exec_cmd, exec_native_cmd, | ||
| 21 | get_bitbake_var, BOOTDD_EXTRA_SPACE) | ||
| 22 | |||
| 23 | logger = logging.getLogger('wic') | ||
| 24 | |||
| 25 | class BootimgPcbiosPlugin(SourcePlugin): | ||
| 26 | """ | ||
| 27 | Create MBR boot partition and install syslinux on it. | ||
| 28 | """ | ||
| 29 | |||
| 30 | name = 'bootimg-pcbios' | ||
| 31 | |||
| 32 | @classmethod | ||
| 33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): | ||
| 34 | """ | ||
| 35 | Check if dirname exists in default bootimg_dir or in STAGING_DIR. | ||
| 36 | """ | ||
| 37 | staging_datadir = get_bitbake_var("STAGING_DATADIR") | ||
| 38 | for result in (bootimg_dir, staging_datadir): | ||
| 39 | if os.path.exists("%s/%s" % (result, dirname)): | ||
| 40 | return result | ||
| 41 | |||
| 42 | # STAGING_DATADIR is expanded with MLPREFIX if multilib is enabled | ||
| 43 | # but dependency syslinux is still populated to original STAGING_DATADIR | ||
| 44 | nonarch_datadir = re.sub('/[^/]*recipe-sysroot', '/recipe-sysroot', staging_datadir) | ||
| 45 | if os.path.exists(os.path.join(nonarch_datadir, dirname)): | ||
| 46 | return nonarch_datadir | ||
| 47 | |||
| 48 | raise WicError("Couldn't find correct bootimg_dir, exiting") | ||
| 49 | |||
| 50 | @classmethod | ||
| 51 | def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir, | ||
| 52 | bootimg_dir, kernel_dir, native_sysroot): | ||
| 53 | """ | ||
| 54 | Called after all partitions have been prepared and assembled into a | ||
| 55 | disk image. In this case, we install the MBR. | ||
| 56 | """ | ||
| 57 | bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux') | ||
| 58 | mbrfile = "%s/syslinux/" % bootimg_dir | ||
| 59 | if creator.ptable_format == 'msdos': | ||
| 60 | mbrfile += "mbr.bin" | ||
| 61 | elif creator.ptable_format == 'gpt': | ||
| 62 | mbrfile += "gptmbr.bin" | ||
| 63 | else: | ||
| 64 | raise WicError("Unsupported partition table: %s" % | ||
| 65 | creator.ptable_format) | ||
| 66 | |||
| 67 | if not os.path.exists(mbrfile): | ||
| 68 | raise WicError("Couldn't find %s. If using the -e option, do you " | ||
| 69 | "have the right MACHINE set in local.conf? If not, " | ||
| 70 | "is the bootimg_dir path correct?" % mbrfile) | ||
| 71 | |||
| 72 | full_path = creator._full_path(workdir, disk_name, "direct") | ||
| 73 | logger.debug("Installing MBR on disk %s as %s with size %s bytes", | ||
| 74 | disk_name, full_path, disk.min_size) | ||
| 75 | |||
| 76 | dd_cmd = "dd if=%s of=%s conv=notrunc" % (mbrfile, full_path) | ||
| 77 | exec_cmd(dd_cmd, native_sysroot) | ||
| 78 | |||
| 79 | @classmethod | ||
| 80 | def do_configure_partition(cls, part, source_params, creator, cr_workdir, | ||
| 81 | oe_builddir, bootimg_dir, kernel_dir, | ||
| 82 | native_sysroot): | ||
| 83 | """ | ||
| 84 | Called before do_prepare_partition(), creates syslinux config | ||
| 85 | """ | ||
| 86 | hdddir = "%s/hdd/boot" % cr_workdir | ||
| 87 | |||
| 88 | install_cmd = "install -d %s" % hdddir | ||
| 89 | exec_cmd(install_cmd) | ||
| 90 | |||
| 91 | bootloader = creator.ks.bootloader | ||
| 92 | |||
| 93 | custom_cfg = None | ||
| 94 | if bootloader.configfile: | ||
| 95 | custom_cfg = get_custom_config(bootloader.configfile) | ||
| 96 | if custom_cfg: | ||
| 97 | # Use a custom configuration for syslinux | ||
| 98 | syslinux_conf = custom_cfg | ||
| 99 | logger.debug("Using custom configuration file %s " | ||
| 100 | "for syslinux.cfg", bootloader.configfile) | ||
| 101 | else: | ||
| 102 | raise WicError("configfile is specified but failed to " | ||
| 103 | "get it from %s." % bootloader.configfile) | ||
| 104 | |||
| 105 | if not custom_cfg: | ||
| 106 | # Create syslinux configuration using parameters from wks file | ||
| 107 | splash = os.path.join(cr_workdir, "/hdd/boot/splash.jpg") | ||
| 108 | if os.path.exists(splash): | ||
| 109 | splashline = "menu background splash.jpg" | ||
| 110 | else: | ||
| 111 | splashline = "" | ||
| 112 | |||
| 113 | syslinux_conf = "" | ||
| 114 | syslinux_conf += "PROMPT 0\n" | ||
| 115 | syslinux_conf += "TIMEOUT " + str(bootloader.timeout) + "\n" | ||
| 116 | syslinux_conf += "\n" | ||
| 117 | syslinux_conf += "ALLOWOPTIONS 1\n" | ||
| 118 | syslinux_conf += "SERIAL 0 115200\n" | ||
| 119 | syslinux_conf += "\n" | ||
| 120 | if splashline: | ||
| 121 | syslinux_conf += "%s\n" % splashline | ||
| 122 | syslinux_conf += "DEFAULT boot\n" | ||
| 123 | syslinux_conf += "LABEL boot\n" | ||
| 124 | |||
| 125 | kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE") | ||
| 126 | syslinux_conf += "KERNEL " + kernel + "\n" | ||
| 127 | |||
| 128 | syslinux_conf += "APPEND label=boot root=%s %s\n" % \ | ||
| 129 | (creator.rootdev, bootloader.append) | ||
| 130 | |||
| 131 | logger.debug("Writing syslinux config %s/hdd/boot/syslinux.cfg", | ||
| 132 | cr_workdir) | ||
| 133 | cfg = open("%s/hdd/boot/syslinux.cfg" % cr_workdir, "w") | ||
| 134 | cfg.write(syslinux_conf) | ||
| 135 | cfg.close() | ||
| 136 | |||
| 137 | @classmethod | ||
| 138 | def do_prepare_partition(cls, part, source_params, creator, cr_workdir, | ||
| 139 | oe_builddir, bootimg_dir, kernel_dir, | ||
| 140 | rootfs_dir, native_sysroot): | ||
| 141 | """ | ||
| 142 | Called to do the actual content population for a partition i.e. it | ||
| 143 | 'prepares' the partition to be incorporated into the image. | ||
| 144 | In this case, prepare content for legacy bios boot partition. | ||
| 145 | """ | ||
| 146 | bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux') | ||
| 147 | |||
| 148 | staging_kernel_dir = kernel_dir | ||
| 149 | |||
| 150 | hdddir = "%s/hdd/boot" % cr_workdir | ||
| 151 | |||
| 152 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | ||
| 153 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": | ||
| 154 | if get_bitbake_var("INITRAMFS_IMAGE"): | ||
| 155 | kernel = "%s-%s.bin" % \ | ||
| 156 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | ||
| 157 | |||
| 158 | cmds = ("install -m 0644 %s/%s %s/%s" % | ||
| 159 | (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")), | ||
| 160 | "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % | ||
| 161 | (bootimg_dir, hdddir), | ||
| 162 | "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % | ||
| 163 | (bootimg_dir, hdddir), | ||
| 164 | "install -m 444 %s/syslinux/libcom32.c32 %s/libcom32.c32" % | ||
| 165 | (bootimg_dir, hdddir), | ||
| 166 | "install -m 444 %s/syslinux/libutil.c32 %s/libutil.c32" % | ||
| 167 | (bootimg_dir, hdddir)) | ||
| 168 | |||
| 169 | for install_cmd in cmds: | ||
| 170 | exec_cmd(install_cmd) | ||
| 171 | |||
| 172 | du_cmd = "du -bks %s" % hdddir | ||
| 173 | out = exec_cmd(du_cmd) | ||
| 174 | blocks = int(out.split()[0]) | ||
| 175 | |||
| 176 | extra_blocks = part.get_extra_block_count(blocks) | ||
| 177 | |||
| 178 | if extra_blocks < BOOTDD_EXTRA_SPACE: | ||
| 179 | extra_blocks = BOOTDD_EXTRA_SPACE | ||
| 180 | |||
| 181 | blocks += extra_blocks | ||
| 182 | |||
| 183 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", | ||
| 184 | extra_blocks, part.mountpoint, blocks) | ||
| 185 | |||
| 186 | # dosfs image, created by mkdosfs | ||
| 187 | bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno) | ||
| 188 | |||
| 189 | label = part.label if part.label else "boot" | ||
| 190 | |||
| 191 | dosfs_cmd = "mkdosfs -n %s -i %s -S 512 -C %s %d" % \ | ||
| 192 | (label, part.fsuuid, bootimg, blocks) | ||
| 193 | exec_native_cmd(dosfs_cmd, native_sysroot) | ||
| 194 | |||
| 195 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | ||
| 196 | exec_native_cmd(mcopy_cmd, native_sysroot) | ||
| 197 | |||
| 198 | syslinux_cmd = "syslinux %s" % bootimg | ||
| 199 | exec_native_cmd(syslinux_cmd, native_sysroot) | ||
| 200 | |||
| 201 | chmod_cmd = "chmod 644 %s" % bootimg | ||
| 202 | exec_cmd(chmod_cmd) | ||
| 203 | |||
| 204 | du_cmd = "du -Lbks %s" % bootimg | ||
| 205 | out = exec_cmd(du_cmd) | ||
| 206 | bootimg_size = out.split()[0] | ||
| 207 | |||
| 208 | part.size = int(bootimg_size) | ||
| 209 | part.source_file = bootimg | ||
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py index 5bd7390680..4279ddded8 100644 --- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py +++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py | |||
| @@ -13,7 +13,7 @@ | |||
| 13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
| 14 | # | 14 | # |
| 15 | # DESCRIPTION | 15 | # DESCRIPTION |
| 16 | # This implements the 'bootimg-biosplusefi' source plugin class for 'wic' | 16 | # This implements the 'bootimg_biosplusefi' source plugin class for 'wic' |
| 17 | # | 17 | # |
| 18 | # AUTHORS | 18 | # AUTHORS |
| 19 | # William Bourque <wbourque [at) gmail.com> | 19 | # William Bourque <wbourque [at) gmail.com> |
| @@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
| 34 | 34 | ||
| 35 | Note it is possible to create an image that can boot from both | 35 | Note it is possible to create an image that can boot from both |
| 36 | legacy BIOS and EFI by defining two partitions : one with arg | 36 | legacy BIOS and EFI by defining two partitions : one with arg |
| 37 | --source bootimg-efi and another one with --source bootimg-pcbios. | 37 | --source bootimg_efi and another one with --source bootimg_pcbios. |
| 38 | However, this method has the obvious downside that it requires TWO | 38 | However, this method has the obvious downside that it requires TWO |
| 39 | partitions to be created on the storage device. | 39 | partitions to be created on the storage device. |
| 40 | Both partitions will also be marked as "bootable" which does not work on | 40 | Both partitions will also be marked as "bootable" which does not work on |
| @@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
| 45 | the first partition will be duplicated into the second, even though it | 45 | the first partition will be duplicated into the second, even though it |
| 46 | will not be used at all. | 46 | will not be used at all. |
| 47 | 47 | ||
| 48 | Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin | 48 | Also, unlike "isoimage_isohybrid" that also does BIOS and EFI, this plugin |
| 49 | allows you to have more than a single rootfs partition and does | 49 | allows you to have more than a single rootfs partition and does |
| 50 | not turn the rootfs into an initramfs RAM image. | 50 | not turn the rootfs into an initramfs RAM image. |
| 51 | 51 | ||
| @@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
| 53 | does not have the limitations listed above. | 53 | does not have the limitations listed above. |
| 54 | 54 | ||
| 55 | The plugin is made so it tries not to reimplement what's already | 55 | The plugin is made so it tries not to reimplement what's already |
| 56 | been done in other plugins; as such it imports "bootimg-pcbios" | 56 | been done in other plugins; as such it imports "bootimg_pcbios" |
| 57 | and "bootimg-efi". | 57 | and "bootimg_efi". |
| 58 | Plugin "bootimg-pcbios" is used to generate legacy BIOS boot. | 58 | Plugin "bootimg_pcbios" is used to generate legacy BIOS boot. |
| 59 | Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it | 59 | Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it |
| 60 | requires a --sourceparams argument to know which loader to use; refer | 60 | requires a --sourceparams argument to know which loader to use; refer |
| 61 | to "bootimg-efi" code/documentation for the list of loader. | 61 | to "bootimg_efi" code/documentation for the list of loader. |
| 62 | 62 | ||
| 63 | Imports are handled with "SourceFileLoader" from importlib as it is | 63 | Imports are handled with "SourceFileLoader" from importlib as it is |
| 64 | otherwise very difficult to import a module that has a hyphen "-" in its | 64 | otherwise very difficult to import a module that has a hyphen "-" in its |
| 65 | filename. | 65 | filename. |
| 66 | The SourcePlugin() methods used in the plugins (do_install_disk, | 66 | The SourcePlugin() methods used in the plugins (do_install_disk, |
| 67 | do_configure_partition, do_prepare_partition) are then called on both, | 67 | do_configure_partition, do_prepare_partition) are then called on both, |
| 68 | beginning with "bootimg-efi". | 68 | beginning with "bootimg_efi". |
| 69 | 69 | ||
| 70 | Plugin options, such as "--sourceparams", can still be passed to a | 70 | Plugin options, such as "--sourceparams", can still be passed to a |
| 71 | plugin, as long as they do not cause issues in the other plugin. | 71 | plugin, as long as they do not cause issues in the other plugin. |
| 72 | 72 | ||
| 73 | Example wic configuration: | 73 | Example wic configuration: |
| 74 | part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\ | 74 | part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\ |
| 75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid | 75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid |
| 76 | """ | 76 | """ |
| 77 | 77 | ||
| 78 | name = 'bootimg-biosplusefi' | 78 | name = 'bootimg_biosplusefi' |
| 79 | 79 | ||
| 80 | __PCBIOS_MODULE_NAME = "bootimg-pcbios" | 80 | __PCBIOS_MODULE_NAME = "bootimg_pcbios" |
| 81 | __EFI_MODULE_NAME = "bootimg-efi" | 81 | __EFI_MODULE_NAME = "bootimg_efi" |
| 82 | 82 | ||
| 83 | __imgEFIObj = None | 83 | __imgEFIObj = None |
| 84 | __imgBiosObj = None | 84 | __imgBiosObj = None |
| @@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
| 100 | 100 | ||
| 101 | """ | 101 | """ |
| 102 | 102 | ||
| 103 | # Import bootimg-pcbios (class name "BootimgPcbiosPlugin") | 103 | # Import bootimg_pcbios (class name "BootimgPcbiosPlugin") |
| 104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
| 105 | cls.__PCBIOS_MODULE_NAME + ".py") | 105 | cls.__PCBIOS_MODULE_NAME + ".py") |
| 106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) | 106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) |
| @@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
| 108 | loader.exec_module(mod) | 108 | loader.exec_module(mod) |
| 109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() | 109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() |
| 110 | 110 | ||
| 111 | # Import bootimg-efi (class name "BootimgEFIPlugin") | 111 | # Import bootimg_efi (class name "BootimgEFIPlugin") |
| 112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
| 113 | cls.__EFI_MODULE_NAME + ".py") | 113 | cls.__EFI_MODULE_NAME + ".py") |
| 114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) | 114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) |
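The SourceFileLoader mechanism the docstring refers to is standard importlib machinery; with the rename to underscores it is largely historical, but for reference here is a sketch of loading a plugin module directly from a file path (the path and module name are illustrative):

    import importlib.util
    from importlib.machinery import SourceFileLoader

    module_path = "/path/to/bootimg_pcbios.py"  # illustrative
    loader = SourceFileLoader("bootimg_pcbios", module_path)
    spec = importlib.util.spec_from_loader(loader.name, loader)
    mod = importlib.util.module_from_spec(spec)
    loader.exec_module(mod)
    plugin = mod.BootimgPcbiosPlugin()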
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py index 13a9cddf4e..cf16705a28 100644 --- a/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/scripts/lib/wic/plugins/source/bootimg_efi.py | |||
| @@ -4,7 +4,7 @@ | |||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
| 5 | # | 5 | # |
| 6 | # DESCRIPTION | 6 | # DESCRIPTION |
| 7 | # This implements the 'bootimg-efi' source plugin class for 'wic' | 7 | # This implements the 'bootimg_efi' source plugin class for 'wic' |
| 8 | # | 8 | # |
| 9 | # AUTHORS | 9 | # AUTHORS |
| 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
| @@ -32,7 +32,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 32 | This plugin supports GRUB 2 and systemd-boot bootloaders. | 32 | This plugin supports GRUB 2 and systemd-boot bootloaders. |
| 33 | """ | 33 | """ |
| 34 | 34 | ||
| 35 | name = 'bootimg-efi' | 35 | name = 'bootimg_efi' |
| 36 | 36 | ||
| 37 | @classmethod | 37 | @classmethod |
| 38 | def _copy_additional_files(cls, hdddir, initrd, dtb): | 38 | def _copy_additional_files(cls, hdddir, initrd, dtb): |
| @@ -43,16 +43,18 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 43 | if initrd: | 43 | if initrd: |
| 44 | initrds = initrd.split(';') | 44 | initrds = initrd.split(';') |
| 45 | for rd in initrds: | 45 | for rd in initrds: |
| 46 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir) | 46 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir) |
| 47 | exec_cmd(cp_cmd, True) | 47 | out = exec_cmd(cp_cmd, True) |
| 48 | logger.debug("initrd files:\n%s" % (out)) | ||
| 48 | else: | 49 | else: |
| 49 | logger.debug("Ignoring missing initrd") | 50 | logger.debug("Ignoring missing initrd") |
| 50 | 51 | ||
| 51 | if dtb: | 52 | if dtb: |
| 52 | if ';' in dtb: | 53 | if ';' in dtb: |
| 53 | raise WicError("Only one DTB supported, exiting") | 54 | raise WicError("Only one DTB supported, exiting") |
| 54 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir) | 55 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir) |
| 55 | exec_cmd(cp_cmd, True) | 56 | out = exec_cmd(cp_cmd, True) |
| 57 | logger.debug("dtb files:\n%s" % (out)) | ||
| 56 | 58 | ||
| 57 | @classmethod | 59 | @classmethod |
| 58 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): | 60 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): |
| @@ -123,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 123 | @classmethod | 125 | @classmethod |
| 124 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): | 126 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): |
| 125 | """ | 127 | """ |
| 126 | Create loader-specific systemd-boot/gummiboot config | 128 | Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (UKI) |
| 129 | support is handled in the image recipe with uki.bbclass; only the systemd-boot loader | ||
| 130 | config and the ESP partition structure are created here. | ||
| 127 | """ | 131 | """ |
| 132 | # detect uki.bbclass usage | ||
| 133 | image_classes = (get_bitbake_var("IMAGE_CLASSES") or "").split() | ||
| 134 | unified_image = False | ||
| 135 | if "uki" in image_classes: | ||
| 136 | unified_image = True | ||
| 137 | |||
| 128 | install_cmd = "install -d %s/loader" % hdddir | 138 | install_cmd = "install -d %s/loader" % hdddir |
| 129 | exec_cmd(install_cmd) | 139 | exec_cmd(install_cmd) |
| 130 | 140 | ||
| @@ -132,28 +142,26 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 132 | exec_cmd(install_cmd) | 142 | exec_cmd(install_cmd) |
| 133 | 143 | ||
| 134 | bootloader = creator.ks.bootloader | 144 | bootloader = creator.ks.bootloader |
| 135 | |||
| 136 | unified_image = source_params.get('create-unified-kernel-image') == "true" | ||
| 137 | |||
| 138 | loader_conf = "" | 145 | loader_conf = "" |
| 139 | if not unified_image: | ||
| 140 | loader_conf += "default boot\n" | ||
| 141 | loader_conf += "timeout %d\n" % bootloader.timeout | ||
| 142 | 146 | ||
| 143 | initrd = source_params.get('initrd') | 147 | # 5 seconds is a sensible default timeout |
| 144 | dtb = source_params.get('dtb') | 148 | loader_conf += "timeout %d\n" % (bootloader.timeout or 5) |
| 145 | |||
| 146 | if not unified_image: | ||
| 147 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
| 148 | 149 | ||
| 149 | logger.debug("Writing systemd-boot config " | 150 | logger.debug("Writing systemd-boot config " |
| 150 | "%s/hdd/boot/loader/loader.conf", cr_workdir) | 151 | "%s/hdd/boot/loader/loader.conf", cr_workdir) |
| 151 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") | 152 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") |
| 152 | cfg.write(loader_conf) | 153 | cfg.write(loader_conf) |
| 154 | logger.debug("loader.conf:\n%s" % (loader_conf)) | ||
| 153 | cfg.close() | 155 | cfg.close() |
| 154 | 156 | ||
| 157 | initrd = source_params.get('initrd') | ||
| 158 | dtb = source_params.get('dtb') | ||
| 159 | if not unified_image: | ||
| 160 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
| 161 | |||
| 155 | configfile = creator.ks.bootloader.configfile | 162 | configfile = creator.ks.bootloader.configfile |
| 156 | custom_cfg = None | 163 | custom_cfg = None |
| 164 | boot_conf = "" | ||
| 157 | if configfile: | 165 | if configfile: |
| 158 | custom_cfg = get_custom_config(configfile) | 166 | custom_cfg = get_custom_config(configfile) |
| 159 | if custom_cfg: | 167 | if custom_cfg: |
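A minimal sketch of what the two changes above amount to at runtime; the IMAGE_CLASSES value is illustrative:

    # uki.bbclass detection, as in the hunk above.
    image_classes = "license_image uki".split()       # illustrative value
    unified_image = "uki" in image_classes            # True here

    # Timeout fallback: a wks "bootloader" line without --timeout leaves
    # bootloader.timeout unset, so loader.conf falls back to 5 seconds.
    timeout = None                                    # stand-in for bootloader.timeout
    loader_conf = "timeout %d\n" % (timeout or 5)     # -> "timeout 5\n"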
| @@ -164,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 164 | else: | 172 | else: |
| 165 | raise WicError("configfile is specified but failed to " | 173 | raise WicError("configfile is specified but failed to " |
| 166 | "get it from %s.", configfile) | 174 | "get it from %s.", configfile) |
| 167 | 175 | else: | |
| 168 | if not custom_cfg: | ||
| 169 | # Create systemd-boot configuration using parameters from wks file | 176 | # Create systemd-boot configuration using parameters from wks file |
| 170 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 177 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
| 171 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": | 178 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": |
| @@ -175,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 175 | 182 | ||
| 176 | title = source_params.get('title') | 183 | title = source_params.get('title') |
| 177 | 184 | ||
| 178 | boot_conf = "" | ||
| 179 | boot_conf += "title %s\n" % (title if title else "boot") | 185 | boot_conf += "title %s\n" % (title if title else "boot") |
| 180 | boot_conf += "linux /%s\n" % kernel | 186 | boot_conf += "linux /%s\n" % kernel |
| 181 | 187 | ||
| @@ -200,6 +206,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 200 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) | 206 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) |
| 201 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") | 207 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") |
| 202 | cfg.write(boot_conf) | 208 | cfg.write(boot_conf) |
| 209 | logger.debug("boot.conf:\n%s" % (boot_conf)) | ||
| 203 | cfg.close() | 210 | cfg.close() |
| 204 | 211 | ||
| 205 | 212 | ||
| @@ -223,9 +230,9 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 223 | elif source_params['loader'] == 'uefi-kernel': | 230 | elif source_params['loader'] == 'uefi-kernel': |
| 224 | pass | 231 | pass |
| 225 | else: | 232 | else: |
| 226 | raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) | 233 | raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader']) |
| 227 | except KeyError: | 234 | except KeyError: |
| 228 | raise WicError("bootimg-efi requires a loader, none specified") | 235 | raise WicError("bootimg_efi requires a loader, none specified") |
| 229 | 236 | ||
| 230 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: | 237 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: |
| 231 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') | 238 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') |
| @@ -245,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 245 | 252 | ||
| 246 | # list of tuples (src_name, dst_name) | 253 | # list of tuples (src_name, dst_name) |
| 247 | deploy_files = [] | 254 | deploy_files = [] |
| 248 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | 255 | for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files): |
| 249 | if ';' in src_entry: | 256 | if ';' in src_entry: |
| 250 | dst_entry = tuple(src_entry.split(';')) | 257 | dst_entry = tuple(src_entry.split(';')) |
| 251 | if not dst_entry[0] or not dst_entry[1]: | 258 | if not dst_entry[0] or not dst_entry[1]: |
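The regex change above adds '+' to the characters accepted in IMAGE_EFI_BOOT_FILES entries; a runnable distillation of the src;dst parsing it feeds, with an illustrative value:

    import re

    # Illustrative IMAGE_EFI_BOOT_FILES value: a plain file, a renamed copy,
    # a glob, and a name containing '+' (now accepted by the pattern).
    boot_files = "bzImage u-boot.bin;uboot dtbs/*.dtb vmlinuz+sig.efi"

    deploy_files = []
    for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
        if ';' in src_entry:
            dst_entry = tuple(src_entry.split(';'))
        else:
            dst_entry = (src_entry, src_entry)
        deploy_files.append(dst_entry)
    # [('bzImage', 'bzImage'), ('u-boot.bin', 'uboot'),
    #  ('dtbs/*.dtb', 'dtbs/*.dtb'), ('vmlinuz+sig.efi', 'vmlinuz+sig.efi')]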
| @@ -304,134 +311,43 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 304 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | 311 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) |
| 305 | 312 | ||
| 306 | if source_params.get('create-unified-kernel-image') == "true": | 313 | if source_params.get('create-unified-kernel-image') == "true": |
| 307 | initrd = source_params.get('initrd') | 314 | raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.") |
| 308 | if not initrd: | ||
| 309 | raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting") | ||
| 310 | |||
| 311 | deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
| 312 | efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub")) | ||
| 313 | if len(efi_stub) == 0: | ||
| 314 | raise WicError("Unified Kernel Image EFI stub not found, exiting") | ||
| 315 | efi_stub = efi_stub[0] | ||
| 316 | |||
| 317 | with tempfile.TemporaryDirectory() as tmp_dir: | ||
| 318 | label = source_params.get('label') | ||
| 319 | label_conf = "root=%s" % creator.rootdev | ||
| 320 | if label: | ||
| 321 | label_conf = "LABEL=%s" % label | ||
| 322 | |||
| 323 | bootloader = creator.ks.bootloader | ||
| 324 | cmdline = open("%s/cmdline" % tmp_dir, "w") | ||
| 325 | cmdline.write("%s %s" % (label_conf, bootloader.append)) | ||
| 326 | cmdline.close() | ||
| 327 | 315 | ||
| 328 | initrds = initrd.split(';') | 316 | if source_params.get('install-kernel-into-boot-dir') != 'false': |
| 329 | initrd = open("%s/initrd" % tmp_dir, "wb") | 317 | install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \ |
| 330 | for f in initrds: | 318 | (staging_kernel_dir, kernel, hdddir, kernel) |
| 331 | with open("%s/%s" % (deploy_dir, f), 'rb') as in_file: | 319 | out = exec_cmd(install_cmd) |
| 332 | shutil.copyfileobj(in_file, initrd) | 320 | logger.debug("Installed kernel files:\n%s" % out) |
| 333 | initrd.close() | ||
| 334 | |||
| 335 | # Searched by systemd-boot: | ||
| 336 | # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images | ||
| 337 | install_cmd = "install -d %s/EFI/Linux" % hdddir | ||
| 338 | exec_cmd(install_cmd) | ||
| 339 | |||
| 340 | staging_dir_host = get_bitbake_var("STAGING_DIR_HOST") | ||
| 341 | target_sys = get_bitbake_var("TARGET_SYS") | ||
| 342 | |||
| 343 | objdump_cmd = "%s-objdump" % target_sys | ||
| 344 | objdump_cmd += " -p %s" % efi_stub | ||
| 345 | objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'" | ||
| 346 | |||
| 347 | ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot) | ||
| 348 | align = int(align_str, 16) | ||
| 349 | |||
| 350 | objdump_cmd = "%s-objdump" % target_sys | ||
| 351 | objdump_cmd += " -h %s | tail -2" % efi_stub | ||
| 352 | ret, output = exec_native_cmd(objdump_cmd, native_sysroot) | ||
| 353 | |||
| 354 | offset = int(output.split()[2], 16) + int(output.split()[3], 16) | ||
| 355 | |||
| 356 | osrel_off = offset + align - offset % align | ||
| 357 | osrel_path = "%s/usr/lib/os-release" % staging_dir_host | ||
| 358 | osrel_sz = os.stat(osrel_path).st_size | ||
| 359 | |||
| 360 | cmdline_off = osrel_off + osrel_sz | ||
| 361 | cmdline_off = cmdline_off + align - cmdline_off % align | ||
| 362 | cmdline_sz = os.stat(cmdline.name).st_size | ||
| 363 | |||
| 364 | dtb_off = cmdline_off + cmdline_sz | ||
| 365 | dtb_off = dtb_off + align - dtb_off % align | ||
| 366 | |||
| 367 | dtb = source_params.get('dtb') | ||
| 368 | if dtb: | ||
| 369 | if ';' in dtb: | ||
| 370 | raise WicError("Only one DTB supported, exiting") | ||
| 371 | dtb_path = "%s/%s" % (deploy_dir, dtb) | ||
| 372 | dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \ | ||
| 373 | (dtb_path, dtb_off) | ||
| 374 | linux_off = dtb_off + os.stat(dtb_path).st_size | ||
| 375 | linux_off = linux_off + align - linux_off % align | ||
| 376 | else: | ||
| 377 | dtb_params = '' | ||
| 378 | linux_off = dtb_off | ||
| 379 | |||
| 380 | linux_path = "%s/%s" % (staging_kernel_dir, kernel) | ||
| 381 | linux_sz = os.stat(linux_path).st_size | ||
| 382 | |||
| 383 | initrd_off = linux_off + linux_sz | ||
| 384 | initrd_off = initrd_off + align - initrd_off % align | ||
| 385 | |||
| 386 | # https://www.freedesktop.org/software/systemd/man/systemd-stub.html | ||
| 387 | objcopy_cmd = "%s-objcopy" % target_sys | ||
| 388 | objcopy_cmd += " --enable-deterministic-archives" | ||
| 389 | objcopy_cmd += " --preserve-dates" | ||
| 390 | objcopy_cmd += " --add-section .osrel=%s" % osrel_path | ||
| 391 | objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off | ||
| 392 | objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name | ||
| 393 | objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off | ||
| 394 | objcopy_cmd += dtb_params | ||
| 395 | objcopy_cmd += " --add-section .linux=%s" % linux_path | ||
| 396 | objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off | ||
| 397 | objcopy_cmd += " --add-section .initrd=%s" % initrd.name | ||
| 398 | objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off | ||
| 399 | objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir) | ||
| 400 | |||
| 401 | exec_native_cmd(objcopy_cmd, native_sysroot) | ||
| 402 | else: | ||
| 403 | if source_params.get('install-kernel-into-boot-dir') != 'false': | ||
| 404 | install_cmd = "install -m 0644 %s/%s %s/%s" % \ | ||
| 405 | (staging_kernel_dir, kernel, hdddir, kernel) | ||
| 406 | exec_cmd(install_cmd) | ||
| 407 | 321 | ||
| 408 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): | 322 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): |
| 409 | for src_path, dst_path in cls.install_task: | 323 | for src_path, dst_path in cls.install_task: |
| 410 | install_cmd = "install -m 0644 -D %s %s" \ | 324 | install_cmd = "install -v -p -m 0644 -D %s %s" \ |
| 411 | % (os.path.join(kernel_dir, src_path), | 325 | % (os.path.join(kernel_dir, src_path), |
| 412 | os.path.join(hdddir, dst_path)) | 326 | os.path.join(hdddir, dst_path)) |
| 413 | exec_cmd(install_cmd) | 327 | out = exec_cmd(install_cmd) |
| 328 | logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out) | ||
| 414 | 329 | ||
| 415 | try: | 330 | try: |
| 416 | if source_params['loader'] == 'grub-efi': | 331 | if source_params['loader'] == 'grub-efi': |
| 417 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, | 332 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, |
| 418 | "%s/grub.cfg" % cr_workdir) | 333 | "%s/grub.cfg" % cr_workdir) |
| 419 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: | 334 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: |
| 420 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) | 335 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) |
| 421 | exec_cmd(cp_cmd, True) | 336 | exec_cmd(cp_cmd, True) |
| 422 | shutil.move("%s/grub.cfg" % cr_workdir, | 337 | shutil.move("%s/grub.cfg" % cr_workdir, |
| 423 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) | 338 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) |
| 424 | elif source_params['loader'] == 'systemd-boot': | 339 | elif source_params['loader'] == 'systemd-boot': |
| 425 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: | 340 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: |
| 426 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) | 341 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) |
| 427 | exec_cmd(cp_cmd, True) | 342 | out = exec_cmd(cp_cmd, True) |
| 343 | logger.debug("systemd-boot files:\n%s" % out) | ||
| 428 | elif source_params['loader'] == 'uefi-kernel': | 344 | elif source_params['loader'] == 'uefi-kernel': |
| 429 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 345 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
| 430 | if not kernel: | 346 | if not kernel: |
| 431 | raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target) | 347 | raise WicError("Empty KERNEL_IMAGETYPE") |
| 432 | target = get_bitbake_var("TARGET_SYS") | 348 | target = get_bitbake_var("TARGET_SYS") |
| 433 | if not target: | 349 | if not target: |
| 434 | raise WicError("Unknown arch (TARGET_SYS) %s\n" % target) | 350 | raise WicError("Empty TARGET_SYS") |
| 435 | 351 | ||
| 436 | if re.match("x86_64", target): | 352 | if re.match("x86_64", target): |
| 437 | kernel_efi_image = "bootx64.efi" | 353 | kernel_efi_image = "bootx64.efi" |
| @@ -445,23 +361,33 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 445 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) | 361 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) |
| 446 | 362 | ||
| 447 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: | 363 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: |
| 448 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) | 364 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) |
| 449 | exec_cmd(cp_cmd, True) | 365 | out = exec_cmd(cp_cmd, True) |
| 366 | logger.debug("uefi-kernel files:\n%s" % out) | ||
| 450 | else: | 367 | else: |
| 451 | raise WicError("unrecognized bootimg-efi loader: %s" % | 368 | raise WicError("unrecognized bootimg_efi loader: %s" % |
| 452 | source_params['loader']) | 369 | source_params['loader']) |
| 370 | |||
| 371 | # must have installed at least one EFI bootloader | ||
| 372 | out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi')) | ||
| 373 | logger.debug("Installed EFI loader files:\n%s" % out) | ||
| 374 | if not out: | ||
| 375 | raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.") | ||
| 376 | |||
| 453 | except KeyError: | 377 | except KeyError: |
| 454 | raise WicError("bootimg-efi requires a loader, none specified") | 378 | raise WicError("bootimg_efi requires a loader, none specified") |
| 455 | 379 | ||
| 456 | startup = os.path.join(kernel_dir, "startup.nsh") | 380 | startup = os.path.join(kernel_dir, "startup.nsh") |
| 457 | if os.path.exists(startup): | 381 | if os.path.exists(startup): |
| 458 | cp_cmd = "cp %s %s/" % (startup, hdddir) | 382 | cp_cmd = "cp -v -p %s %s/" % (startup, hdddir) |
| 459 | exec_cmd(cp_cmd, True) | 383 | out = exec_cmd(cp_cmd, True) |
| 384 | logger.debug("startup files:\n%s" % out) | ||
| 460 | 385 | ||
| 461 | for paths in part.include_path or []: | 386 | for paths in part.include_path or []: |
| 462 | for path in paths: | 387 | for path in paths: |
| 463 | cp_cmd = "cp -r %s %s/" % (path, hdddir) | 388 | cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir) |
| 464 | exec_cmd(cp_cmd, True) | 389 | out = exec_cmd(cp_cmd, True) |
| 390 | logger.debug("include_path files:\n%s" % out) | ||
| 465 | 391 | ||
| 466 | du_cmd = "du -bks %s" % hdddir | 392 | du_cmd = "du -bks %s" % hdddir |
| 467 | out = exec_cmd(du_cmd) | 393 | out = exec_cmd(du_cmd) |
| @@ -489,12 +415,14 @@ class BootimgEFIPlugin(SourcePlugin): | |||
| 489 | 415 | ||
| 490 | label = part.label if part.label else "ESP" | 416 | label = part.label if part.label else "ESP" |
| 491 | 417 | ||
| 492 | dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ | 418 | dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \ |
| 493 | (label, part.fsuuid, bootimg, blocks) | 419 | (label, part.fsuuid, bootimg, blocks) |
| 494 | exec_native_cmd(dosfs_cmd, native_sysroot) | 420 | out = exec_native_cmd(dosfs_cmd, native_sysroot) |
| 421 | logger.debug("mkdosfs:\n%s" % (str(out))) | ||
| 495 | 422 | ||
| 496 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | 423 | mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir) |
| 497 | exec_native_cmd(mcopy_cmd, native_sysroot) | 424 | out = exec_native_cmd(mcopy_cmd, native_sysroot) |
| 425 | logger.debug("mcopy:\n%s" % (str(out))) | ||
| 498 | 426 | ||
| 499 | chmod_cmd = "chmod 644 %s" % bootimg | 427 | chmod_cmd = "chmod 644 %s" % bootimg |
| 500 | exec_cmd(chmod_cmd) | 428 | exec_cmd(chmod_cmd) |
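Since create-unified-kernel-image now raises an error, UKI builds move to the image recipe. A minimal sketch of the replacement configuration, assuming the stock uki.bbclass (the plugin's detection reads IMAGE_CLASSES, as shown in do_configure_systemdboot above):

    # local.conf or image recipe -- minimal sketch, assuming uki.bbclass defaults
    IMAGE_CLASSES += "uki"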
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py index 1071d1af3f..cc121a78f0 100644 --- a/scripts/lib/wic/plugins/source/bootimg-partition.py +++ b/scripts/lib/wic/plugins/source/bootimg_partition.py | |||
| @@ -4,7 +4,7 @@ | |||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
| 5 | # | 5 | # |
| 6 | # DESCRIPTION | 6 | # DESCRIPTION |
| 7 | # This implements the 'bootimg-partition' source plugin class for | 7 | # This implements the 'bootimg_partition' source plugin class for |
| 8 | # 'wic'. The plugin creates an image of boot partition, copying over | 8 | # 'wic'. The plugin creates an image of boot partition, copying over |
| 9 | # files listed in IMAGE_BOOT_FILES bitbake variable. | 9 | # files listed in IMAGE_BOOT_FILES bitbake variable. |
| 10 | # | 10 | # |
| @@ -16,7 +16,7 @@ import logging | |||
| 16 | import os | 16 | import os |
| 17 | import re | 17 | import re |
| 18 | 18 | ||
| 19 | from glob import glob | 19 | from oe.bootfiles import get_boot_files |
| 20 | 20 | ||
| 21 | from wic import WicError | 21 | from wic import WicError |
| 22 | from wic.engine import get_custom_config | 22 | from wic.engine import get_custom_config |
| @@ -31,7 +31,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
| 31 | listed in IMAGE_BOOT_FILES bitbake variable. | 31 | listed in IMAGE_BOOT_FILES bitbake variable. |
| 32 | """ | 32 | """ |
| 33 | 33 | ||
| 34 | name = 'bootimg-partition' | 34 | name = 'bootimg_partition' |
| 35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' | 35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' |
| 36 | 36 | ||
| 37 | @classmethod | 37 | @classmethod |
| @@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
| 66 | 66 | ||
| 67 | logger.debug('Boot files: %s', boot_files) | 67 | logger.debug('Boot files: %s', boot_files) |
| 68 | 68 | ||
| 69 | # list of tuples (src_name, dst_name) | 69 | cls.install_task = get_boot_files(kernel_dir, boot_files) |
| 70 | deploy_files = [] | ||
| 71 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
| 72 | if ';' in src_entry: | ||
| 73 | dst_entry = tuple(src_entry.split(';')) | ||
| 74 | if not dst_entry[0] or not dst_entry[1]: | ||
| 75 | raise WicError('Malformed boot file entry: %s' % src_entry) | ||
| 76 | else: | ||
| 77 | dst_entry = (src_entry, src_entry) | ||
| 78 | |||
| 79 | logger.debug('Destination entry: %r', dst_entry) | ||
| 80 | deploy_files.append(dst_entry) | ||
| 81 | |||
| 82 | cls.install_task = []; | ||
| 83 | for deploy_entry in deploy_files: | ||
| 84 | src, dst = deploy_entry | ||
| 85 | if '*' in src: | ||
| 86 | # by default install files under their basename | ||
| 87 | entry_name_fn = os.path.basename | ||
| 88 | if dst != src: | ||
| 89 | # unless a target name was given, then treat name | ||
| 90 | # as a directory and append a basename | ||
| 91 | entry_name_fn = lambda name: \ | ||
| 92 | os.path.join(dst, | ||
| 93 | os.path.basename(name)) | ||
| 94 | |||
| 95 | srcs = glob(os.path.join(kernel_dir, src)) | ||
| 96 | |||
| 97 | logger.debug('Globbed sources: %s', ', '.join(srcs)) | ||
| 98 | for entry in srcs: | ||
| 99 | src = os.path.relpath(entry, kernel_dir) | ||
| 100 | entry_dst_name = entry_name_fn(entry) | ||
| 101 | cls.install_task.append((src, entry_dst_name)) | ||
| 102 | else: | ||
| 103 | cls.install_task.append((src, dst)) | ||
| 104 | |||
| 105 | if source_params.get('loader') != "u-boot": | 70 | if source_params.get('loader') != "u-boot": |
| 106 | return | 71 | return |
| 107 | 72 | ||
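The deleted parsing loop now lives in oe.bootfiles; a hedged sketch of the helper's contract as inferred from this call site (arguments illustrative, return shape matching how install_task is consumed later):

    from oe.bootfiles import get_boot_files

    # get_boot_files(kernel_dir, boot_files) -> list of (src, dst) tuples;
    # globs in src are expanded relative to kernel_dir, ';' renames on copy.
    install_task = get_boot_files("/deploy/images/machine",
                                  "zImage u-boot.bin;uboot overlays/*.dtbo")
    # e.g. [('zImage', 'zImage'), ('u-boot.bin', 'uboot'),
    #       ('overlays/foo.dtbo', 'overlays/foo.dtbo'), ...]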
diff --git a/scripts/lib/wic/plugins/source/bootimg_pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py new file mode 100644 index 0000000000..caabda6318 --- /dev/null +++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py | |||
| @@ -0,0 +1,483 @@ | |||
| 1 | # | ||
| 2 | # Copyright (c) 2014, Intel Corporation. | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # DESCRIPTION | ||
| 7 | # This implements the 'bootimg_pcbios' source plugin class for 'wic' | ||
| 8 | # | ||
| 9 | # AUTHORS | ||
| 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | ||
| 11 | # | ||
| 12 | |||
| 13 | import logging | ||
| 14 | import os | ||
| 15 | import re | ||
| 16 | import shutil | ||
| 17 | |||
| 18 | from glob import glob | ||
| 19 | from wic import WicError | ||
| 20 | from wic.engine import get_custom_config | ||
| 21 | from wic.pluginbase import SourcePlugin | ||
| 22 | from wic.misc import (exec_cmd, exec_native_cmd, | ||
| 23 | get_bitbake_var, BOOTDD_EXTRA_SPACE) | ||
| 24 | |||
| 25 | logger = logging.getLogger('wic') | ||
| 26 | |||
| 27 | class BootimgPcbiosPlugin(SourcePlugin): | ||
| 28 | """ | ||
| 29 | Creates a boot partition bootable by legacy BIOS firmware, with | ||
| 30 | MBR/MSDOS as the partition table format. The plugin installs the | ||
| 31 | caller-selected bootloader directly into the resulting wic image. | ||
| 32 | |||
| 33 | Supported Bootloaders: | ||
| 34 | * syslinux (default) | ||
| 35 | * grub | ||
| 36 | |||
| 37 | ****************** Wic Plugin Depends/Vars ****************** | ||
| 38 | WKS_FILE_DEPENDS = "grub-native grub" | ||
| 39 | WKS_FILE_DEPENDS = "syslinux-native syslinux" | ||
| 40 | |||
| 41 | # Optional variables | ||
| 42 | # GRUB_MKIMAGE_FORMAT_PC - Used to define target platform. | ||
| 43 | # GRUB_PREFIX_PATH - Used to define which directory | ||
| 44 | # grub config and modules are going | ||
| 45 | # to reside in. | ||
| 46 | GRUB_PREFIX_PATH = '/boot/grub2' # Default: /boot/grub | ||
| 47 | GRUB_MKIMAGE_FORMAT_PC = 'i386-pc' # Default: i386-pc | ||
| 48 | |||
| 49 | WICVARS:append = "\ | ||
| 50 | GRUB_PREFIX_PATH \ | ||
| 51 | GRUB_MKIMAGE_FORMAT_PC \ | ||
| 52 | " | ||
| 53 | ****************** Wic Plugin Depends/Vars ****************** | ||
| 54 | |||
| 55 | |||
| 56 | **************** Example kickstart Legacy Bios Grub Boot **************** | ||
| 57 | part boot --label bios_boot --fstype ext4 --offset 1024 --fixed-size 78M | ||
| 58 | --source bootimg_pcbios --sourceparams="loader-bios=grub" --active | ||
| 59 | |||
| 60 | part roots --label rootfs --fstype ext4 --source rootfs --use-uuid | ||
| 61 | bootloader --ptable msdos --source bootimg_pcbios | ||
| 62 | **************** Example kickstart Legacy Bios Grub Boot **************** | ||
| 63 | |||
| 64 | |||
| 65 | *************** Example kickstart Legacy Bios Syslinux Boot **************** | ||
| 66 | part /boot --source bootimg_pcbios --sourceparams="loader-bios=syslinux" | ||
| 67 | --ondisk sda --label boot --fstype vfat --align 1024 --active | ||
| 68 | |||
| 69 | part roots --label rootfs --fstype ext4 --source rootfs --use-uuid | ||
| 70 | bootloader --ptable msdos --source bootimg_pcbios | ||
| 71 | """ | ||
| 72 | |||
| 73 | name = 'bootimg_pcbios' | ||
| 74 | |||
| 75 | # Variable required for do_install_disk | ||
| 76 | loader = '' | ||
| 77 | |||
| 78 | @classmethod | ||
| 79 | def _get_bootimg_dir(cls, bootimg_dir, dirname): | ||
| 80 | """ | ||
| 81 | Check if dirname exists in default bootimg_dir or in STAGING_DIR. | ||
| 82 | """ | ||
| 83 | staging_datadir = get_bitbake_var("STAGING_DATADIR") | ||
| 84 | for result in (bootimg_dir, staging_datadir): | ||
| 85 | if os.path.exists("%s/%s" % (result, dirname)): | ||
| 86 | return result | ||
| 87 | |||
| 88 | # STAGING_DATADIR is expanded with MLPREFIX if multilib is enabled | ||
| 89 | # but dependency syslinux is still populated to original STAGING_DATADIR | ||
| 90 | nonarch_datadir = re.sub('/[^/]*recipe-sysroot', '/recipe-sysroot', staging_datadir) | ||
| 91 | if os.path.exists(os.path.join(nonarch_datadir, dirname)): | ||
| 92 | return nonarch_datadir | ||
| 93 | |||
| 94 | raise WicError("Couldn't find correct bootimg_dir, exiting") | ||
| 95 | |||
| 96 | @classmethod | ||
| 97 | def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir, | ||
| 98 | bootimg_dir, kernel_dir, native_sysroot): | ||
| 99 | full_path = creator._full_path(workdir, disk_name, "direct") | ||
| 100 | logger.debug("Installing MBR on disk %s as %s with size %s bytes", | ||
| 101 | disk_name, full_path, disk.min_size) | ||
| 102 | |||
| 103 | if cls.loader == 'grub': | ||
| 104 | cls._do_install_grub(creator, kernel_dir, | ||
| 105 | native_sysroot, full_path) | ||
| 106 | elif cls.loader == 'syslinux': | ||
| 107 | cls._do_install_syslinux(creator, bootimg_dir, | ||
| 108 | native_sysroot, full_path) | ||
| 109 | else: | ||
| 110 | raise WicError("boot loader some how not specified check do_prepare_partition") | ||
| 111 | |||
| 112 | @classmethod | ||
| 113 | def do_configure_partition(cls, part, source_params, creator, cr_workdir, | ||
| 114 | oe_builddir, bootimg_dir, kernel_dir, | ||
| 115 | native_sysroot): | ||
| 116 | try: | ||
| 117 | if source_params['loader-bios'] == 'grub': | ||
| 118 | cls._do_configure_grub(part, creator, cr_workdir) | ||
| 119 | elif source_params['loader-bios'] == 'syslinux': | ||
| 120 | cls._do_configure_syslinux(part, creator, cr_workdir) | ||
| 121 | else: | ||
| 122 | raise WicError("unrecognized bootimg_pcbios loader: %s" % source_params['loader-bios']) | ||
| 123 | except KeyError: | ||
| 124 | cls._do_configure_syslinux(part, creator, cr_workdir) | ||
| 125 | |||
| 126 | @classmethod | ||
| 127 | def do_prepare_partition(cls, part, source_params, creator, cr_workdir, | ||
| 128 | oe_builddir, bootimg_dir, kernel_dir, | ||
| 129 | rootfs_dir, native_sysroot): | ||
| 130 | try: | ||
| 131 | if source_params['loader-bios'] == 'grub': | ||
| 132 | cls._do_prepare_grub(part, cr_workdir, oe_builddir, | ||
| 133 | kernel_dir, rootfs_dir, native_sysroot) | ||
| 134 | elif source_params['loader-bios'] == 'syslinux': | ||
| 135 | cls._do_prepare_syslinux(part, cr_workdir, bootimg_dir, | ||
| 136 | kernel_dir, native_sysroot) | ||
| 137 | else: | ||
| 138 | raise WicError("unrecognized bootimg_pcbios loader: %s" % source_params['loader-bios']) | ||
| 139 | |||
| 140 | # Required by do_install_disk | ||
| 141 | cls.loader = source_params['loader-bios'] | ||
| 142 | except KeyError: | ||
| 143 | # Required by do_install_disk | ||
| 144 | cls.loader = 'syslinux' | ||
| 145 | cls._do_prepare_syslinux(part, cr_workdir, bootimg_dir, | ||
| 146 | kernel_dir, native_sysroot) | ||
| 147 | |||
| 148 | @classmethod | ||
| 149 | def _get_staging_libdir(cls): | ||
| 150 | """ | ||
| 151 | For unknown reasons, when running tests with poky, | ||
| 152 | STAGING_LIBDIR gets unset when wic create is executed. | ||
| 153 | Below is a hack to determine what STAGING_LIBDIR should | ||
| 154 | be if not specified. | ||
| 155 | """ | ||
| 156 | |||
| 157 | staging_libdir = get_bitbake_var('STAGING_LIBDIR') | ||
| 158 | staging_dir_target = get_bitbake_var('STAGING_DIR_TARGET') | ||
| 159 | |||
| 160 | if not staging_libdir: | ||
| 161 | staging_libdir = '%s/usr/lib64' % staging_dir_target | ||
| 162 | if not os.path.isdir(staging_libdir): | ||
| 163 | staging_libdir = '%s/usr/lib32' % staging_dir_target | ||
| 164 | if not os.path.isdir(staging_libdir): | ||
| 165 | staging_libdir = '%s/usr/lib' % staging_dir_target | ||
| 166 | |||
| 167 | return staging_libdir | ||
| 168 | |||
| 169 | @classmethod | ||
| 170 | def _get_bootloader_config(cls, bootloader, loader): | ||
| 171 | custom_cfg = None | ||
| 172 | |||
| 173 | if bootloader.configfile: | ||
| 174 | custom_cfg = get_custom_config(bootloader.configfile) | ||
| 175 | if custom_cfg: | ||
| 176 | logger.debug("Using custom configuration file %s " | ||
| 177 | "for %s.cfg", bootloader.configfile, | ||
| 178 | loader) | ||
| 179 | return custom_cfg | ||
| 180 | else: | ||
| 181 | raise WicError("configfile is specified but failed to " | ||
| 182 | "get it from %s." % bootloader.configfile) | ||
| 183 | return custom_cfg | ||
| 184 | |||
| 185 | @classmethod | ||
| 186 | def _do_configure_syslinux(cls, part, creator, cr_workdir): | ||
| 187 | """ | ||
| 188 | Called before do_prepare_partition(), creates syslinux config | ||
| 189 | """ | ||
| 190 | |||
| 191 | hdddir = "%s/hdd/boot" % cr_workdir | ||
| 192 | |||
| 193 | install_cmd = "install -d %s" % hdddir | ||
| 194 | exec_cmd(install_cmd) | ||
| 195 | |||
| 196 | bootloader = creator.ks.bootloader | ||
| 197 | syslinux_conf = cls._get_bootloader_config(bootloader, 'syslinux') | ||
| 198 | |||
| 199 | if not syslinux_conf: | ||
| 200 | # Create syslinux configuration using parameters from wks file | ||
| 201 | splash = os.path.join(hdddir, "splash.jpg") | ||
| 202 | if os.path.exists(splash): | ||
| 203 | splashline = "menu background splash.jpg" | ||
| 204 | else: | ||
| 205 | splashline = "" | ||
| 206 | |||
| 207 | # Set a default timeout if none specified to avoid | ||
| 208 | # 'None' being the value placed within the configuration | ||
| 209 | # file. | ||
| 210 | if not bootloader.timeout: | ||
| 211 | bootloader.timeout = 500 | ||
| 212 | |||
| 213 | # Set a default kernel params string if none specified | ||
| 214 | # to avoid 'None' being the value placed within the | ||
| 215 | # configuration file. | ||
| 216 | if not bootloader.append: | ||
| 217 | bootloader.append = "rootwait console=ttyS0,115200 console=tty0" | ||
| 218 | |||
| 219 | syslinux_conf = "" | ||
| 220 | syslinux_conf += "PROMPT 0\n" | ||
| 221 | syslinux_conf += "TIMEOUT " + str(bootloader.timeout) + "\n" | ||
| 222 | syslinux_conf += "\n" | ||
| 223 | syslinux_conf += "ALLOWOPTIONS 1\n" | ||
| 224 | syslinux_conf += "SERIAL 0 115200\n" | ||
| 225 | syslinux_conf += "\n" | ||
| 226 | if splashline: | ||
| 227 | syslinux_conf += "%s\n" % splashline | ||
| 228 | syslinux_conf += "DEFAULT boot\n" | ||
| 229 | syslinux_conf += "LABEL boot\n" | ||
| 230 | |||
| 231 | kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE") | ||
| 232 | syslinux_conf += "KERNEL " + kernel + "\n" | ||
| 233 | |||
| 234 | syslinux_conf += "APPEND label=boot root=%s %s\n" % \ | ||
| 235 | (creator.rootdev, bootloader.append) | ||
| 236 | |||
| 237 | logger.debug("Writing syslinux config %s/syslinux.cfg", hdddir) | ||
| 238 | cfg = open("%s/hdd/boot/syslinux.cfg" % cr_workdir, "w") | ||
| 239 | cfg.write(syslinux_conf) | ||
| 240 | cfg.close() | ||
| 241 | |||
| 242 | @classmethod | ||
| 243 | def _do_prepare_syslinux(cls, part, cr_workdir, bootimg_dir, | ||
| 244 | kernel_dir, native_sysroot): | ||
| 245 | """ | ||
| 246 | Called to do the actual content population for a partition i.e. it | ||
| 247 | 'prepares' the partition to be incorporated into the image. | ||
| 248 | In this case, prepare content for legacy bios boot partition. | ||
| 249 | """ | ||
| 250 | bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux') | ||
| 251 | |||
| 252 | staging_kernel_dir = kernel_dir | ||
| 253 | |||
| 254 | hdddir = "%s/hdd/boot" % cr_workdir | ||
| 255 | |||
| 256 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | ||
| 257 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": | ||
| 258 | if get_bitbake_var("INITRAMFS_IMAGE"): | ||
| 259 | kernel = "%s-%s.bin" % \ | ||
| 260 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | ||
| 261 | |||
| 262 | cmds = ("install -m 0644 %s/%s %s/%s" % | ||
| 263 | (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")), | ||
| 264 | "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % | ||
| 265 | (bootimg_dir, hdddir), | ||
| 266 | "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % | ||
| 267 | (bootimg_dir, hdddir), | ||
| 268 | "install -m 444 %s/syslinux/libcom32.c32 %s/libcom32.c32" % | ||
| 269 | (bootimg_dir, hdddir), | ||
| 270 | "install -m 444 %s/syslinux/libutil.c32 %s/libutil.c32" % | ||
| 271 | (bootimg_dir, hdddir)) | ||
| 272 | |||
| 273 | for install_cmd in cmds: | ||
| 274 | exec_cmd(install_cmd) | ||
| 275 | |||
| 276 | du_cmd = "du -bks %s" % hdddir | ||
| 277 | out = exec_cmd(du_cmd) | ||
| 278 | blocks = int(out.split()[0]) | ||
| 279 | |||
| 280 | extra_blocks = part.get_extra_block_count(blocks) | ||
| 281 | |||
| 282 | if extra_blocks < BOOTDD_EXTRA_SPACE: | ||
| 283 | extra_blocks = BOOTDD_EXTRA_SPACE | ||
| 284 | |||
| 285 | blocks += extra_blocks | ||
| 286 | |||
| 287 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", | ||
| 288 | extra_blocks, part.mountpoint, blocks) | ||
| 289 | |||
| 290 | # dosfs image, created by mkdosfs | ||
| 291 | bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno) | ||
| 292 | |||
| 293 | label = part.label if part.label else "boot" | ||
| 294 | |||
| 295 | dosfs_cmd = "mkdosfs -n %s -i %s -S 512 -C %s %d" % \ | ||
| 296 | (label, part.fsuuid, bootimg, blocks) | ||
| 297 | exec_native_cmd(dosfs_cmd, native_sysroot) | ||
| 298 | |||
| 299 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | ||
| 300 | exec_native_cmd(mcopy_cmd, native_sysroot) | ||
| 301 | |||
| 302 | syslinux_cmd = "syslinux %s" % bootimg | ||
| 303 | exec_native_cmd(syslinux_cmd, native_sysroot) | ||
| 304 | |||
| 305 | chmod_cmd = "chmod 644 %s" % bootimg | ||
| 306 | exec_cmd(chmod_cmd) | ||
| 307 | |||
| 308 | du_cmd = "du -Lbks %s" % bootimg | ||
| 309 | out = exec_cmd(du_cmd) | ||
| 310 | bootimg_size = out.split()[0] | ||
| 311 | |||
| 312 | part.size = int(bootimg_size) | ||
| 313 | part.source_file = bootimg | ||
| 314 | |||
| 315 | @classmethod | ||
| 316 | def _do_install_syslinux(cls, creator, bootimg_dir, | ||
| 317 | native_sysroot, full_path): | ||
| 318 | """ | ||
| 319 | Called after all partitions have been prepared and assembled into a | ||
| 320 | disk image. In this case, we install the MBR. | ||
| 321 | """ | ||
| 322 | |||
| 323 | bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux') | ||
| 324 | mbrfile = "%s/syslinux/" % bootimg_dir | ||
| 325 | if creator.ptable_format == 'msdos': | ||
| 326 | mbrfile += "mbr.bin" | ||
| 327 | elif creator.ptable_format == 'gpt': | ||
| 328 | mbrfile += "gptmbr.bin" | ||
| 329 | else: | ||
| 330 | raise WicError("Unsupported partition table: %s" % | ||
| 331 | creator.ptable_format) | ||
| 332 | |||
| 333 | if not os.path.exists(mbrfile): | ||
| 334 | raise WicError("Couldn't find %s. If using the -e option, do you " | ||
| 335 | "have the right MACHINE set in local.conf? If not, " | ||
| 336 | "is the bootimg_dir path correct?" % mbrfile) | ||
| 337 | |||
| 338 | dd_cmd = "dd if=%s of=%s conv=notrunc" % (mbrfile, full_path) | ||
| 339 | exec_cmd(dd_cmd, native_sysroot) | ||
| 340 | |||
| 341 | @classmethod | ||
| 342 | def _do_configure_grub(cls, part, creator, cr_workdir): | ||
| 343 | hdddir = "%s/hdd" % cr_workdir | ||
| 344 | bootloader = creator.ks.bootloader | ||
| 345 | |||
| 346 | grub_conf = cls._get_bootloader_config(bootloader, 'grub') | ||
| 347 | |||
| 348 | grub_prefix_path = get_bitbake_var('GRUB_PREFIX_PATH') | ||
| 349 | if not grub_prefix_path: | ||
| 350 | grub_prefix_path = '/boot/grub' | ||
| 351 | |||
| 352 | grub_path = "%s/%s" %(hdddir, grub_prefix_path) | ||
| 353 | install_cmd = "install -d %s" % grub_path | ||
| 354 | exec_cmd(install_cmd) | ||
| 355 | |||
| 356 | if not grub_conf: | ||
| 357 | # Set a default timeout if none specified to avoid | ||
| 358 | # 'None' being the value placed within the configuration | ||
| 359 | # file. | ||
| 360 | if not bootloader.timeout: | ||
| 361 | bootloader.timeout = 500 | ||
| 362 | |||
| 363 | # Set a default kernel params string if none specified | ||
| 364 | # to avoid 'None' being the value placed within the | ||
| 365 | # configuration file. | ||
| 366 | if not bootloader.append: | ||
| 367 | bootloader.append = "rootwait rootfstype=%s " % (part.fstype) | ||
| 368 | bootloader.append += "console=ttyS0,115200 console=tty0" | ||
| 369 | |||
| 370 | kernel = "/boot/" + get_bitbake_var("KERNEL_IMAGETYPE") | ||
| 371 | |||
| 372 | grub_conf = 'serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n' | ||
| 373 | grub_conf += 'set gfxmode=auto\n' | ||
| 374 | grub_conf += 'set gfxpayload=keep\n\n' | ||
| 375 | grub_conf += 'set default=0\n\n' | ||
| 376 | grub_conf += '# Boot automatically after %d secs.\n' % (bootloader.timeout) | ||
| 377 | grub_conf += 'set timeout=%d\n\n' % (bootloader.timeout) | ||
| 378 | grub_conf += 'menuentry \'default\' {\n' | ||
| 379 | grub_conf += '\tsearch --no-floppy --set=root --file %s\n' % (kernel) | ||
| 380 | grub_conf += '\tprobe --set partuuid --part-uuid ($root)\n' | ||
| 381 | grub_conf += '\tlinux %s root=PARTUUID=$partuuid %s\n}\n' % \ | ||
| 382 | (kernel, bootloader.append) | ||
| 383 | |||
| 384 | logger.debug("Writing grub config %s/grub.cfg", grub_path) | ||
| 385 | cfg = open("%s/grub.cfg" % grub_path, "w") | ||
| 386 | cfg.write(grub_conf) | ||
| 387 | cfg.close() | ||
| 388 | |||
| 389 | @classmethod | ||
| 390 | def _do_prepare_grub(cls, part, cr_workdir, oe_builddir, | ||
| 391 | kernel_dir, rootfs_dir, native_sysroot): | ||
| 392 | """ | ||
| 393 | 1. Generate embed.cfg that'll later be embedded into core.img. | ||
| 394 | So, that core.img knows where to search for grub.cfg. | ||
| 395 | 2. Generate core.img or grub stage 1.5. | ||
| 396 | 3. Copy modules into partition. | ||
| 397 | 4. Create partition rootfs file. | ||
| 398 | """ | ||
| 399 | |||
| 400 | hdddir = "%s/hdd" % cr_workdir | ||
| 401 | |||
| 402 | copy_types = [ '*.mod', '*.o', '*.lst' ] | ||
| 403 | |||
| 404 | builtin_modules = 'boot linux ext2 fat serial part_msdos part_gpt \ | ||
| 405 | normal multiboot probe biosdisk msdospart configfile search loadenv test' | ||
| 406 | |||
| 407 | staging_libdir = cls._get_staging_libdir() | ||
| 408 | |||
| 409 | grub_format = get_bitbake_var('GRUB_MKIMAGE_FORMAT_PC') | ||
| 410 | if not grub_format: | ||
| 411 | grub_format = 'i386-pc' | ||
| 412 | |||
| 413 | grub_prefix_path = get_bitbake_var('GRUB_PREFIX_PATH') | ||
| 414 | if not grub_prefix_path: | ||
| 415 | grub_prefix_path = '/boot/grub' | ||
| 416 | |||
| 417 | grub_path = "%s/%s" %(hdddir, grub_prefix_path) | ||
| 418 | core_img = '%s/grub-bios-core.img' % (kernel_dir) | ||
| 419 | grub_mods_path = '%s/grub/%s' % (staging_libdir, grub_format) | ||
| 420 | |||
| 421 | # Generate embedded grub config | ||
| 422 | embed_cfg_str = 'search.file %s/grub.cfg root\n' % (grub_prefix_path) | ||
| 423 | embed_cfg_str += 'set prefix=($root)%s\n' % (grub_prefix_path) | ||
| 424 | embed_cfg_str += 'configfile ($root)%s/grub.cfg\n' % (grub_prefix_path) | ||
| 425 | cfg = open('%s/embed.cfg' % (kernel_dir), 'w+') | ||
| 426 | cfg.write(embed_cfg_str) | ||
| 427 | cfg.close() | ||
| 428 | |||
| 429 | # core.img doesn't get included into boot partition | ||
| 430 | # it's later dd onto the resulting wic image. | ||
| 431 | grub_mkimage = 'grub-mkimage \ | ||
| 432 | --prefix=%s \ | ||
| 433 | --format=%s \ | ||
| 434 | --config=%s/embed.cfg \ | ||
| 435 | --directory=%s \ | ||
| 436 | --output=%s %s' % \ | ||
| 437 | (grub_prefix_path, grub_format, kernel_dir, | ||
| 438 | grub_mods_path, core_img, builtin_modules) | ||
| 439 | exec_native_cmd(grub_mkimage, native_sysroot) | ||
| 440 | |||
| 441 | # Copy grub modules | ||
| 442 | install_dir = '%s/%s/%s' % (hdddir, grub_prefix_path, grub_format) | ||
| 443 | os.makedirs(install_dir, exist_ok=True) | ||
| 444 | |||
| 445 | for ctype in copy_types: | ||
| 446 | files = glob('%s/grub/%s/%s' % \ | ||
| 447 | (staging_libdir, grub_format, ctype)) | ||
| 448 | for file in files: | ||
| 449 | shutil.copy2(file, install_dir, follow_symlinks=True) | ||
| 450 | |||
| 451 | # Create boot partition | ||
| 452 | logger.debug('Prepare partition using rootfs in %s', hdddir) | ||
| 453 | part.prepare_rootfs(cr_workdir, oe_builddir, hdddir, | ||
| 454 | native_sysroot, False) | ||
| 455 | |||
| 456 | @classmethod | ||
| 457 | def _do_install_grub(cls, creator, kernel_dir, | ||
| 458 | native_sysroot, full_path): | ||
| 459 | core_img = '%s/grub-bios-core.img' % (kernel_dir) | ||
| 460 | |||
| 461 | staging_libdir = cls._get_staging_libdir() | ||
| 462 | |||
| 463 | grub_format = get_bitbake_var('GRUB_MKIMAGE_FORMAT_PC') | ||
| 464 | if not grub_format: | ||
| 465 | grub_format = 'i386-pc' | ||
| 466 | |||
| 467 | boot_img = '%s/grub/%s/boot.img' % (staging_libdir, grub_format) | ||
| 468 | if not os.path.exists(boot_img): | ||
| 469 | raise WicError("Couldn't find %s. Did you include " | ||
| 470 | "do_image_wic[depends] += \"grub:do_populate_sysroot\" " | ||
| 471 | "in your image recipe" % boot_img) | ||
| 472 | |||
| 473 | # Install boot.img or grub stage 1 | ||
| 474 | dd_cmd = "dd if=%s of=%s conv=notrunc bs=1 seek=0 count=440" % (boot_img, full_path) | ||
| 475 | exec_cmd(dd_cmd, native_sysroot) | ||
| 476 | |||
| 477 | if creator.ptable_format == 'msdos': | ||
| 478 | # Install core.img or grub stage 1.5 | ||
| 479 | dd_cmd = "dd if=%s of=%s conv=notrunc bs=1 seek=512" % (core_img, full_path) | ||
| 480 | exec_cmd(dd_cmd, native_sysroot) | ||
| 481 | else: | ||
| 482 | raise WicError("Unsupported partition table: %s" % | ||
| 483 | creator.ptable_format) | ||
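The two dd invocations in _do_install_grub implement the classic GRUB BIOS layout; the same byte layout as a standalone Python sketch (paths illustrative):

    # boot.img (stage 1) fills the first 440 bytes of the MBR, leaving the
    # partition table area (bytes 446-509) untouched; core.img (stage 1.5)
    # lands at byte 512, in the post-MBR gap before the first partition.
    def install_grub_bios(disk_path, boot_img, core_img):
        with open(boot_img, "rb") as f:
            stage1 = f.read(440)
        with open(core_img, "rb") as f:
            stage15 = f.read()
        with open(disk_path, "r+b") as disk:
            disk.seek(0)
            disk.write(stage1)
            disk.seek(512)
            disk.write(stage15)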
diff --git a/scripts/lib/wic/plugins/source/extra_partition.py b/scripts/lib/wic/plugins/source/extra_partition.py new file mode 100644 index 0000000000..d370b0107e --- /dev/null +++ b/scripts/lib/wic/plugins/source/extra_partition.py | |||
| @@ -0,0 +1,134 @@ | |||
| 1 | import logging | ||
| 2 | import os | ||
| 3 | import re | ||
| 4 | |||
| 5 | from glob import glob | ||
| 6 | |||
| 7 | from wic import WicError | ||
| 8 | from wic.pluginbase import SourcePlugin | ||
| 9 | from wic.misc import exec_cmd, get_bitbake_var | ||
| 10 | |||
| 11 | logger = logging.getLogger('wic') | ||
| 12 | |||
| 13 | class ExtraPartitionPlugin(SourcePlugin): | ||
| 14 | """ | ||
| 15 | Populates an extra partition with files listed in the IMAGE_EXTRA_PARTITION_FILES | ||
| 16 | BitBake variable. Files should be deployed to the DEPLOY_DIR_IMAGE directory. | ||
| 17 | |||
| 18 | The plugin supports: | ||
| 19 | - Glob pattern matching for file selection. | ||
| 20 | - File renaming. | ||
| 21 | - Suffixes to specify the target partition (by label, UUID, or partname), | ||
| 22 | enabling multiple extra partitions to coexist. | ||
| 23 | |||
| 24 | For example: | ||
| 25 | |||
| 26 | IMAGE_EXTRA_PARTITION_FILES_label-foo = "bar.conf;foo.conf" | ||
| 27 | IMAGE_EXTRA_PARTITION_FILES_uuid-e7d0824e-cda3-4bed-9f54-9ef5312d105d = "bar.conf;foobar.conf" | ||
| 28 | IMAGE_EXTRA_PARTITION_FILES = "foo/*" | ||
| 29 | WICVARS:append = "\ | ||
| 30 | IMAGE_EXTRA_PARTITION_FILES_label-foo \ | ||
| 31 | IMAGE_EXTRA_PARTITION_FILES_uuid-e7d0824e-cda3-4bed-9f54-9ef5312d105d \ | ||
| 32 | " | ||
| 33 | |||
| 34 | """ | ||
| 35 | |||
| 36 | name = 'extra_partition' | ||
| 37 | image_extra_partition_files_var_name = 'IMAGE_EXTRA_PARTITION_FILES' | ||
| 38 | |||
| 39 | @classmethod | ||
| 40 | def do_configure_partition(cls, part, source_params, cr, cr_workdir, | ||
| 41 | oe_builddir, bootimg_dir, kernel_dir, | ||
| 42 | native_sysroot): | ||
| 43 | """ | ||
| 44 | Called before do_prepare_partition(), list the files to copy | ||
| 45 | """ | ||
| 46 | extradir = "%s/extra.%d" % (cr_workdir, part.lineno) | ||
| 47 | install_cmd = "install -d %s" % extradir | ||
| 48 | exec_cmd(install_cmd) | ||
| 49 | |||
| 50 | if not kernel_dir: | ||
| 51 | kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
| 52 | if not kernel_dir: | ||
| 53 | raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") | ||
| 54 | |||
| 55 | extra_files = None | ||
| 56 | for (fmt, ident) in (("_uuid-%s", part.uuid), ("_label-%s", part.label), ("_part-name-%s", part.part_name), (None, None)): | ||
| 57 | if fmt: | ||
| 58 | var = fmt % ident | ||
| 59 | else: | ||
| 60 | var = "" | ||
| 61 | extra_files = get_bitbake_var(cls.image_extra_partition_files_var_name + var) | ||
| 62 | if extra_files is not None: | ||
| 63 | break | ||
| 64 | |||
| 65 | if extra_files is None: | ||
| 66 | raise WicError('No extra files defined, %s unset for entry #%d' % (cls.image_extra_partition_files_var_name, part.lineno)) | ||
| 67 | |||
| 68 | logger.info('Extra files: %s', extra_files) | ||
| 69 | |||
| 70 | # list of tuples (src_name, dst_name) | ||
| 71 | deploy_files = [] | ||
| 72 | for src_entry in re.findall(r'[\w;\-\./\*]+', extra_files): | ||
| 73 | if ';' in src_entry: | ||
| 74 | dst_entry = tuple(src_entry.split(';')) | ||
| 75 | if not dst_entry[0] or not dst_entry[1]: | ||
| 76 | raise WicError('Malformed extra file entry: %s' % src_entry) | ||
| 77 | else: | ||
| 78 | dst_entry = (src_entry, src_entry) | ||
| 79 | |||
| 80 | logger.debug('Destination entry: %r', dst_entry) | ||
| 81 | deploy_files.append(dst_entry) | ||
| 82 | |||
| 83 | cls.install_task = [] | ||
| 84 | for deploy_entry in deploy_files: | ||
| 85 | src, dst = deploy_entry | ||
| 86 | if '*' in src: | ||
| 87 | # by default install files under their basename | ||
| 88 | entry_name_fn = os.path.basename | ||
| 89 | if dst != src: | ||
| 90 | # unless a target name was given, then treat name | ||
| 91 | # as a directory and append a basename | ||
| 92 | entry_name_fn = lambda name: \ | ||
| 93 | os.path.join(dst, | ||
| 94 | os.path.basename(name)) | ||
| 95 | |||
| 96 | srcs = glob(os.path.join(kernel_dir, src)) | ||
| 97 | |||
| 98 | logger.debug('Globbed sources: %s', ', '.join(srcs)) | ||
| 99 | for entry in srcs: | ||
| 100 | src = os.path.relpath(entry, kernel_dir) | ||
| 101 | entry_dst_name = entry_name_fn(entry) | ||
| 102 | cls.install_task.append((src, entry_dst_name)) | ||
| 103 | else: | ||
| 104 | cls.install_task.append((src, dst)) | ||
| 105 | |||
| 106 | |||
| 107 | @classmethod | ||
| 108 | def do_prepare_partition(cls, part, source_params, cr, cr_workdir, | ||
| 109 | oe_builddir, bootimg_dir, kernel_dir, | ||
| 110 | rootfs_dir, native_sysroot): | ||
| 111 | """ | ||
| 112 | Called to do the actual content population for a partition i.e. it | ||
| 113 | 'prepares' the partition to be incorporated into the image. | ||
| 114 | In this case, we copy all files listed in the IMAGE_EXTRA_PARTITION_FILES variable. | ||
| 115 | """ | ||
| 116 | extradir = "%s/extra.%d" % (cr_workdir, part.lineno) | ||
| 117 | |||
| 118 | if not kernel_dir: | ||
| 119 | kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
| 120 | if not kernel_dir: | ||
| 121 | raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") | ||
| 122 | |||
| 123 | for task in cls.install_task: | ||
| 124 | src_path, dst_path = task | ||
| 125 | logger.debug('Install %s as %s', src_path, dst_path) | ||
| 126 | install_cmd = "install -m 0644 -D %s %s" \ | ||
| 127 | % (os.path.join(kernel_dir, src_path), | ||
| 128 | os.path.join(extradir, dst_path)) | ||
| 129 | exec_cmd(install_cmd) | ||
| 130 | |||
| 131 | logger.debug('Prepare extra partition using rootfs in %s', extradir) | ||
| 132 | part.prepare_rootfs(cr_workdir, oe_builddir, extradir, | ||
| 133 | native_sysroot, False) | ||
| 134 | |||
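A wks part entry pairing with the label-suffixed variable from the docstring might look like this (illustrative; any fstype wic supports works):

    part /data --source extra_partition --label foo --fstype ext4 --size 16M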
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py index 607356ad13..5d42eb5d3e 100644 --- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py +++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py | |||
| @@ -4,7 +4,7 @@ | |||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
| 5 | # | 5 | # |
| 6 | # DESCRIPTION | 6 | # DESCRIPTION |
| 7 | # This implements the 'isoimage-isohybrid' source plugin class for 'wic' | 7 | # This implements the 'isoimage_isohybrid' source plugin class for 'wic' |
| 8 | # | 8 | # |
| 9 | # AUTHORS | 9 | # AUTHORS |
| 10 | # Mihaly Varga <mihaly.varga (at] ni.com> | 10 | # Mihaly Varga <mihaly.varga (at] ni.com> |
| @@ -35,7 +35,7 @@ class IsoImagePlugin(SourcePlugin): | |||
| 35 | bootloader files. | 35 | bootloader files. |
| 36 | 36 | ||
| 37 | Example kickstart file: | 37 | Example kickstart file: |
| 38 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ | 38 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\ |
| 39 | image_name= IsoImage" --ondisk cd --label LIVECD | 39 | image_name= IsoImage" --ondisk cd --label LIVECD |
| 40 | bootloader --timeout=10 --append=" " | 40 | bootloader --timeout=10 --append=" " |
| 41 | 41 | ||
| @@ -45,7 +45,7 @@ class IsoImagePlugin(SourcePlugin): | |||
| 45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso | 45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso |
| 46 | """ | 46 | """ |
| 47 | 47 | ||
| 48 | name = 'isoimage-isohybrid' | 48 | name = 'isoimage_isohybrid' |
| 49 | 49 | ||
| 50 | @classmethod | 50 | @classmethod |
| 51 | def do_configure_syslinux(cls, creator, cr_workdir): | 51 | def do_configure_syslinux(cls, creator, cr_workdir): |
| @@ -340,10 +340,10 @@ class IsoImagePlugin(SourcePlugin): | |||
| 340 | cls.do_configure_grubefi(part, creator, target_dir) | 340 | cls.do_configure_grubefi(part, creator, target_dir) |
| 341 | 341 | ||
| 342 | else: | 342 | else: |
| 343 | raise WicError("unrecognized bootimg-efi loader: %s" % | 343 | raise WicError("unrecognized bootimg_efi loader: %s" % |
| 344 | source_params['loader']) | 344 | source_params['loader']) |
| 345 | except KeyError: | 345 | except KeyError: |
| 346 | raise WicError("bootimg-efi requires a loader, none specified") | 346 | raise WicError("bootimg_efi requires a loader, none specified") |
| 347 | 347 | ||
| 348 | # Create efi.img that contains bootloader files for EFI booting | 348 | # Create efi.img that contains bootloader files for EFI booting |
| 349 | # if ISODIR didn't exist or didn't contain it | 349 | # if ISODIR didn't exist or didn't contain it |
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py index c990143c0d..06fce06bb1 100644 --- a/scripts/lib/wic/plugins/source/rootfs.py +++ b/scripts/lib/wic/plugins/source/rootfs.py | |||
| @@ -41,7 +41,7 @@ class RootfsPlugin(SourcePlugin): | |||
| 41 | # Disallow climbing outside of parent directory using '..', | 41 | # Disallow climbing outside of parent directory using '..', |
| 42 | # because doing so could be quite disastrous (we will delete the | 42 | # because doing so could be quite disastrous (we will delete the |
| 43 | # directory, or modify a directory outside OpenEmbedded). | 43 | # directory, or modify a directory outside OpenEmbedded). |
| 44 | full_path = os.path.realpath(os.path.join(rootfs_dir, path)) | 44 | full_path = os.path.abspath(os.path.join(rootfs_dir, path)) |
| 45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): | 45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): |
| 46 | logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) | 46 | logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) |
| 47 | sys.exit(1) | 47 | sys.exit(1) |
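The realpath-to-abspath switch above changes how symlinks inside the rootfs are treated by this containment check; a small illustration (paths illustrative):

    import os

    rootfs = "/work/rootfs"
    # Suppose /work/rootfs/etc/mtab is a symlink to /proc/self/mounts.
    joined = os.path.join(rootfs, "etc/mtab")

    os.path.abspath(joined)   # '/work/rootfs/etc/mtab': lexical only ('..'
                              # is still collapsed), so the path stays inside
                              # the rootfs and the check passes
    os.path.realpath(joined)  # '/proc/self/mounts': resolves the symlink, so
                              # the old check rejected such in-rootfs symlinks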
diff --git a/scripts/lz4c b/scripts/lz4c new file mode 100755 index 0000000000..466fc349e0 --- /dev/null +++ b/scripts/lz4c | |||
| @@ -0,0 +1,26 @@ | |||
| 1 | #!/usr/bin/env bash | ||
| 2 | |||
| 3 | # Wrapper to intercept legacy lz4c arguments and convert to lz4. | ||
| 4 | args=() | ||
| 5 | while [ $# -ne 0 ]; do | ||
| 6 | case ${1} in | ||
| 7 | -c0) | ||
| 8 | args+=(-0) | ||
| 9 | ;; | ||
| 10 | -c1) | ||
| 11 | args+=(-9) | ||
| 12 | ;; | ||
| 13 | -c2|-hc) | ||
| 14 | args+=(-12) | ||
| 15 | ;; | ||
| 16 | -y) | ||
| 17 | args+=(--force) | ||
| 18 | ;; | ||
| 19 | *) | ||
| 20 | args+=("${1}") | ||
| 21 | ;; | ||
| 22 | esac | ||
| 23 | shift | ||
| 24 | done | ||
| 25 | |||
| 26 | exec lz4 "${args[@]}" | ||
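Illustrative invocations showing the translation:

    $ lz4c -c1 -y input.img input.img.lz4    # runs: lz4 -9 --force input.img input.img.lz4
    $ lz4c -c2 data.bin data.bin.lz4         # runs: lz4 -12 data.bin data.bin.lz4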
diff --git a/scripts/makefile-getvar b/scripts/makefile-getvar new file mode 100755 index 0000000000..4a07055e68 --- /dev/null +++ b/scripts/makefile-getvar | |||
| @@ -0,0 +1,24 @@ | |||
| 1 | #! /bin/sh | ||
| 2 | |||
| 3 | # Get a variable's value from a makefile: | ||
| 4 | # | ||
| 5 | # $ makefile-getvar Makefile VARIABLE VARIABLE ... | ||
| 6 | # | ||
| 7 | # If multiple variables are specified, they will be printed one per line. | ||
| 8 | # | ||
| 9 | # SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com> | ||
| 10 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 11 | |||
| 12 | set -eu | ||
| 13 | |||
| 14 | MAKEFILE=$1 | ||
| 15 | shift | ||
| 16 | |||
| 17 | for VARIABLE in "$@"; do | ||
| 18 | make -f - $VARIABLE.var <<EOF | ||
| 19 | include $MAKEFILE | ||
| 20 | |||
| 21 | %.var: | ||
| 22 | @echo \$(\$*) | ||
| 23 | EOF | ||
| 24 | done | ||
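A usage sketch for the helper above (the Makefile contents are hypothetical): each requested variable is expanded by the generated `%.var` pattern rule and printed on its own line:

    $ printf 'VERSION = 1.2\nNAME = demo\n' > Makefile
    $ makefile-getvar Makefile VERSION NAME
    1.2
    demo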
diff --git a/scripts/oe-build-perf-report b/scripts/oe-build-perf-report index 266700d294..a36f3c1bca 100755 --- a/scripts/oe-build-perf-report +++ b/scripts/oe-build-perf-report | |||
| @@ -336,10 +336,16 @@ def print_html_report(data, id_comp, buildstats): | |||
| 336 | test_i = test_data['tests'][test] | 336 | test_i = test_data['tests'][test] |
| 337 | meas_i = test_i['measurements'][meas] | 337 | meas_i = test_i['measurements'][meas] |
| 338 | commit_num = get_data_item(meta, 'layers.meta.commit_count') | 338 | commit_num = get_data_item(meta, 'layers.meta.commit_count') |
| 339 | commit = get_data_item(meta, 'layers.meta.commit') | ||
| 339 | # Add start_time for both test measurement types of sysres and disk usage | 340 | # Add start_time for both test measurement types of sysres and disk usage |
| 340 | start_time = test_i['start_time'][0] | 341 | try: |
| 342 | # Use the commit_time if available, falling back to start_time | ||
| 343 | start_time = get_data_item(meta, 'layers.meta.commit_time') | ||
| 344 | except KeyError: | ||
| 345 | start_time = test_i['start_time'][0] | ||
| 341 | samples.append(measurement_stats(meas_i, '', start_time)) | 346 | samples.append(measurement_stats(meas_i, '', start_time)) |
| 342 | samples[-1]['commit_num'] = commit_num | 347 | samples[-1]['commit_num'] = commit_num |
| 348 | samples[-1]['commit'] = commit | ||
| 343 | 349 | ||
| 344 | absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) | 350 | absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) |
| 345 | reldiff = absdiff * 100 / samples[id_comp]['mean'] | 351 | reldiff = absdiff * 100 / samples[id_comp]['mean'] |
diff --git a/scripts/oe-selftest b/scripts/oe-selftest index 18ac0f5869..afc48d9905 100755 --- a/scripts/oe-selftest +++ b/scripts/oe-selftest | |||
| @@ -18,8 +18,6 @@ | |||
| 18 | 18 | ||
| 19 | import os | 19 | import os |
| 20 | import sys | 20 | import sys |
| 21 | import argparse | ||
| 22 | import logging | ||
| 23 | 21 | ||
| 24 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | 22 | scripts_path = os.path.dirname(os.path.realpath(__file__)) |
| 25 | lib_path = scripts_path + '/lib' | 23 | lib_path = scripts_path + '/lib' |
diff --git a/scripts/oe-setup-build b/scripts/oe-setup-build index c0476992a2..edbcd48355 100755 --- a/scripts/oe-setup-build +++ b/scripts/oe-setup-build | |||
| @@ -18,8 +18,7 @@ def makebuildpath(topdir, template): | |||
| 18 | 18 | ||
| 19 | def discover_templates(layers_file): | 19 | def discover_templates(layers_file): |
| 20 | if not os.path.exists(layers_file): | 20 | if not os.path.exists(layers_file): |
| 21 | print("List of layers {} does not exist; were the layers set up using the setup-layers script?".format(layers_file)) | 21 | raise Exception("List of layers {} does not exist; were the layers set up using the setup-layers script or bitbake-setup tool?".format(layers_file)) |
| 22 | return None | ||
| 23 | 22 | ||
| 24 | templates = [] | 23 | templates = [] |
| 25 | layers_list = json.load(open(layers_file))["layers"] | 24 | layers_list = json.load(open(layers_file))["layers"] |
| @@ -77,8 +76,7 @@ def find_template(template_name, templates): | |||
| 77 | for t in templates: | 76 | for t in templates: |
| 78 | if t["templatename"] == template_name: | 77 | if t["templatename"] == template_name: |
| 79 | return t | 78 | return t |
| 80 | print("Configuration {} is not one of {}, please try again.".format(tempalte_name, [t["templatename"] for t in templates])) | 79 | raise Exception("Configuration {} is not one of {}, please try again.".format(template_name, [t["templatename"] for t in templates])) |
| 81 | return None | ||
| 82 | 80 | ||
| 83 | def setup_build_env(args): | 81 | def setup_build_env(args): |
| 84 | templates = discover_templates(args.layerlist) | 82 | templates = discover_templates(args.layerlist) |
| @@ -91,7 +89,7 @@ def setup_build_env(args): | |||
| 91 | builddir = args.b if args.b else template["buildpath"] | 89 | builddir = args.b if args.b else template["buildpath"] |
| 92 | no_shell = args.no_shell | 90 | no_shell = args.no_shell |
| 93 | coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) | 91 | coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) |
| 94 | cmd_base = ". {} {}".format(os.path.join(coredir, 'oe-init-build-env'), os.path.abspath(builddir)) | 92 | cmd_base = "cd {}\nset {}\n. ./oe-init-build-env\n".format(coredir, os.path.abspath(builddir)) |
| 95 | 93 | ||
| 96 | initbuild = os.path.join(builddir, 'init-build-env') | 94 | initbuild = os.path.join(builddir, 'init-build-env') |
| 97 | if not os.path.exists(initbuild): | 95 | if not os.path.exists(initbuild): |
| @@ -102,9 +100,9 @@ def setup_build_env(args): | |||
| 102 | 100 | ||
| 103 | cmd = "TEMPLATECONF={} {}".format(template["templatepath"], cmd_base) | 101 | cmd = "TEMPLATECONF={} {}".format(template["templatepath"], cmd_base) |
| 104 | if not no_shell: | 102 | if not no_shell: |
| 105 | cmd = cmd + " && {}".format(os.environ['SHELL']) | 103 | cmd = cmd + " && {}".format(os.environ.get('SHELL','bash')) |
| 106 | print("Running:", cmd) | 104 | print("Running:", cmd) |
| 107 | subprocess.run(cmd, shell=True, executable=os.environ['SHELL']) | 105 | subprocess.run(cmd, shell=True, executable=os.environ.get('SHELL','bash')) |
| 108 | 106 | ||
| 109 | parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.") | 107 | parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.") |
| 110 | parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers())) | 108 | parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers())) |
diff --git a/scripts/oe-test b/scripts/oe-test index 55985b0b24..3a00369e01 100755 --- a/scripts/oe-test +++ b/scripts/oe-test | |||
| @@ -7,14 +7,18 @@ | |||
| 7 | # SPDX-License-Identifier: MIT | 7 | # SPDX-License-Identifier: MIT |
| 8 | # | 8 | # |
| 9 | 9 | ||
| 10 | import os | ||
| 11 | import sys | ||
| 12 | import argparse | 10 | import argparse |
| 11 | import glob | ||
| 13 | import logging | 12 | import logging |
| 13 | import os | ||
| 14 | import sys | ||
| 14 | 15 | ||
| 15 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | 16 | scripts_path = os.path.dirname(os.path.realpath(__file__)) |
| 16 | lib_path = scripts_path + '/lib' | 17 | lib_path = os.path.join(scripts_path, 'lib') |
| 17 | sys.path = sys.path + [lib_path] | 18 | sys.path.append(lib_path) |
| 19 | meta_lib_paths = glob.glob(scripts_path + '/*/lib', recursive=True) | ||
| 20 | for p in meta_lib_paths: | ||
| 21 | sys.path.append(p) | ||
| 18 | import argparse_oe | 22 | import argparse_oe |
| 19 | import scriptutils | 23 | import scriptutils |
| 20 | 24 | ||
diff --git a/scripts/patchtest b/scripts/patchtest index 0be7062dc2..9218db232a 100755 --- a/scripts/patchtest +++ b/scripts/patchtest | |||
| @@ -9,12 +9,12 @@ | |||
| 9 | # SPDX-License-Identifier: GPL-2.0-only | 9 | # SPDX-License-Identifier: GPL-2.0-only |
| 10 | # | 10 | # |
| 11 | 11 | ||
| 12 | import sys | 12 | import json |
| 13 | import os | ||
| 14 | import unittest | ||
| 15 | import logging | 13 | import logging |
| 14 | import os | ||
| 15 | import sys | ||
| 16 | import traceback | 16 | import traceback |
| 17 | import json | 17 | import unittest |
| 18 | 18 | ||
| 19 | # Include current path so test cases can see it | 19 | # Include current path so test cases can see it |
| 20 | sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) | 20 | sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) |
| @@ -22,16 +22,17 @@ sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) | |||
| 22 | # Include patchtest library | 22 | # Include patchtest library |
| 23 | sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest')) | 23 | sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest')) |
| 24 | 24 | ||
| 25 | from data import PatchTestInput | 25 | from patchtest_parser import PatchtestParser |
| 26 | from repo import PatchTestRepo | 26 | from repo import PatchTestRepo |
| 27 | 27 | ||
| 28 | import utils | 28 | logger = logging.getLogger("patchtest") |
| 29 | logger = utils.logger_create('patchtest') | 29 | loggerhandler = logging.StreamHandler() |
| 30 | loggerhandler.setFormatter(logging.Formatter("%(message)s")) | ||
| 31 | logger.addHandler(loggerhandler) | ||
| 32 | logger.setLevel(logging.INFO) | ||
| 30 | info = logger.info | 33 | info = logger.info |
| 31 | error = logger.error | 34 | error = logger.error |
| 32 | 35 | ||
| 33 | import repo | ||
| 34 | |||
| 35 | def getResult(patch, mergepatch, logfile=None): | 36 | def getResult(patch, mergepatch, logfile=None): |
| 36 | 37 | ||
| 37 | class PatchTestResult(unittest.TextTestResult): | 38 | class PatchTestResult(unittest.TextTestResult): |
| @@ -46,10 +47,10 @@ def getResult(patch, mergepatch, logfile=None): | |||
| 46 | def startTestRun(self): | 47 | def startTestRun(self): |
| 47 | # let's create the repo already, it can be used later on | 48 | # let's create the repo already, it can be used later on |
| 48 | repoargs = { | 49 | repoargs = { |
| 49 | 'repodir': PatchTestInput.repodir, | 50 | "repodir": PatchtestParser.repodir, |
| 50 | 'commit' : PatchTestInput.basecommit, | 51 | "commit": PatchtestParser.basecommit, |
| 51 | 'branch' : PatchTestInput.basebranch, | 52 | "branch": PatchtestParser.basebranch, |
| 52 | 'patch' : patch, | 53 | "patch": patch, |
| 53 | } | 54 | } |
| 54 | 55 | ||
| 55 | self.repo_error = False | 56 | self.repo_error = False |
| @@ -57,7 +58,7 @@ def getResult(patch, mergepatch, logfile=None): | |||
| 57 | self.test_failure = False | 58 | self.test_failure = False |
| 58 | 59 | ||
| 59 | try: | 60 | try: |
| 60 | self.repo = PatchTestInput.repo = PatchTestRepo(**repoargs) | 61 | self.repo = PatchtestParser.repo = PatchTestRepo(**repoargs) |
| 61 | except: | 62 | except: |
| 62 | logger.error(traceback.print_exc()) | 63 | logger.error(traceback.print_exc()) |
| 63 | self.repo_error = True | 64 | self.repo_error = True |
| @@ -128,7 +129,11 @@ def _runner(resultklass, prefix=None): | |||
| 128 | loader.testMethodPrefix = prefix | 129 | loader.testMethodPrefix = prefix |
| 129 | 130 | ||
| 130 | # create the suite with discovered tests and the corresponding runner | 131 | # create the suite with discovered tests and the corresponding runner |
| 131 | suite = loader.discover(start_dir=PatchTestInput.testdir, pattern=PatchTestInput.pattern, top_level_dir=PatchTestInput.topdir) | 132 | suite = loader.discover( |
| 133 | start_dir=PatchtestParser.testdir, | ||
| 134 | pattern=PatchtestParser.pattern, | ||
| 135 | top_level_dir=PatchtestParser.topdir, | ||
| 136 | ) | ||
| 132 | ntc = suite.countTestCases() | 137 | ntc = suite.countTestCases() |
| 133 | 138 | ||
| 134 | # if there are no test cases, just quit | 139 | # if there are no test cases, just quit |
| @@ -160,24 +165,31 @@ def run(patch, logfile=None): | |||
| 160 | postmerge_resultklass = getResult(patch, True, logfile) | 165 | postmerge_resultklass = getResult(patch, True, logfile) |
| 161 | postmerge_result = _runner(postmerge_resultklass, 'test') | 166 | postmerge_result = _runner(postmerge_resultklass, 'test') |
| 162 | 167 | ||
| 163 | print('----------------------------------------------------------------------\n') | 168 | print_result_message(premerge_result, postmerge_result) |
| 164 | if premerge_result == 2 and postmerge_result == 2: | ||
| 165 | logger.error('patchtest: No test cases found - did you specify the correct suite directory?') | ||
| 166 | if premerge_result == 1 or postmerge_result == 1: | ||
| 167 | logger.error('WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance') | ||
| 168 | else: | ||
| 169 | logger.info('OK: patchtest: All patchtests passed') | ||
| 170 | print('----------------------------------------------------------------------\n') | ||
| 171 | return premerge_result or postmerge_result | 169 | return premerge_result or postmerge_result |
| 172 | 170 | ||
| 171 | def print_result_message(preresult, postresult): | ||
| 172 | print("----------------------------------------------------------------------\n") | ||
| 173 | if preresult == 2 and postresult == 2: | ||
| 174 | logger.error( | ||
| 175 | "patchtest: No test cases found - did you specify the correct suite directory?" | ||
| 176 | ) | ||
| 177 | if preresult == 1 or postresult == 1: | ||
| 178 | logger.error( | ||
| 179 | "WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance" | ||
| 180 | ) | ||
| 181 | else: | ||
| 182 | logger.info("OK: patchtest: All patchtests passed") | ||
| 183 | print("----------------------------------------------------------------------\n") | ||
| 184 | |||
| 173 | def main(): | 185 | def main(): |
| 174 | tmp_patch = False | 186 | tmp_patch = False |
| 175 | patch_path = PatchTestInput.patch_path | 187 | patch_path = PatchtestParser.patch_path |
| 176 | log_results = PatchTestInput.log_results | 188 | log_results = PatchtestParser.log_results |
| 177 | log_path = None | 189 | log_path = None |
| 178 | patch_list = None | 190 | patch_list = None |
| 179 | 191 | ||
| 180 | git_status = os.popen("(cd %s && git status)" % PatchTestInput.repodir).read() | 192 | git_status = os.popen("(cd %s && git status)" % PatchtestParser.repodir).read() |
| 181 | status_matches = ["Changes not staged for commit", "Changes to be committed"] | 193 | status_matches = ["Changes not staged for commit", "Changes to be committed"] |
| 182 | if any([match in git_status for match in status_matches]): | 194 | if any([match in git_status for match in status_matches]): |
| 183 | logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest") | 195 | logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest") |
| @@ -212,16 +224,16 @@ def main(): | |||
| 212 | if __name__ == '__main__': | 224 | if __name__ == '__main__': |
| 213 | ret = 1 | 225 | ret = 1 |
| 214 | 226 | ||
| 215 | # Parse the command line arguments and store it on the PatchTestInput namespace | 227 | # Parse the command line arguments and store it on the PatchtestParser namespace |
| 216 | PatchTestInput.set_namespace() | 228 | PatchtestParser.set_namespace() |
| 217 | 229 | ||
| 218 | # set debugging level | 230 | # set debugging level |
| 219 | if PatchTestInput.debug: | 231 | if PatchtestParser.debug: |
| 220 | logger.setLevel(logging.DEBUG) | 232 | logger.setLevel(logging.DEBUG) |
| 221 | 233 | ||
| 222 | # if topdir not defined, default it to testdir | 234 | # if topdir not defined, default it to testdir |
| 223 | if not PatchTestInput.topdir: | 235 | if not PatchtestParser.topdir: |
| 224 | PatchTestInput.topdir = PatchTestInput.testdir | 236 | PatchtestParser.topdir = PatchtestParser.testdir |
| 225 | 237 | ||
| 226 | try: | 238 | try: |
| 227 | ret = main() | 239 | ret = main() |
diff --git a/scripts/patchtest.README b/scripts/patchtest.README index 76b5fcdb6d..fc1267f053 100644 --- a/scripts/patchtest.README +++ b/scripts/patchtest.README | |||
| @@ -3,62 +3,105 @@ | |||
| 3 | ## Introduction | 3 | ## Introduction |
| 4 | 4 | ||
| 5 | Patchtest is a test framework for community patches based on the standard | 5 | Patchtest is a test framework for community patches based on the standard |
| 6 | unittest python module. As input, it needs tree elements to work properly: | 6 | unittest python module. As input, it needs three elements to work properly: |
| 7 | a patch in mbox format (either created with `git format-patch` or fetched | 7 | |
| 8 | from 'patchwork'), a test suite and a target repository. | 8 | - one or more patches in separate, mbox-formated files |
| 9 | - a test suite | ||
| 10 | - a target repository | ||
| 9 | 11 | ||
| 10 | The first test suite intended to be used with patchtest is found in the | 12 | The first test suite intended to be used with patchtest is found in the |
| 11 | openembedded-core repository [1] targeted for patches that get into the | 13 | openembedded-core repository [1], targeted for patches that get into the |
| 12 | openembedded-core mailing list [2]. This suite is also intended as a | 14 | openembedded-core mailing list [2]. This suite is also intended as a |
| 13 | baseline for development of similar suites for other layers as needed. | 15 | baseline for development of similar suites for other layers as needed. |
| 14 | 16 | ||
| 15 | Patchtest can either run on a host or a guest machine, depending on which | 17 | Patchtest can either run on a host or a guest machine, depending on |
| 16 | environment the execution needs to be done. If you plan to test your own patches | 18 | which environment you prefer. If you plan to test your own patches (a |
| 17 | (a good practice before these are sent to the mailing list), the easiest way is | 19 | good practice before these are sent to the mailing list), the easiest |
| 18 | to install and execute on your local host; in the other hand, if automatic | 20 | way is to install and execute on your local host; on the other hand, if |
| 19 | testing is intended, the guest method is strongly recommended. The guest | 21 | automatic testing is intended, the guest method is strongly recommended. |
| 20 | method requires the use of the patchtest layer, in addition to the tools | 22 | The guest method requires the use of the meta-patchtest layer, in |
| 21 | available in oe-core: https://git.yoctoproject.org/patchtest/ | 23 | addition to the tools available in oe-core: |
| 24 | https://git.yoctoproject.org/meta-patchtest/ | ||
| 22 | 25 | ||
| 23 | ## Installation | 26 | ## Installation |
| 24 | 27 | ||
| 25 | As a tool for use with the Yocto Project, the [quick start guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html) | 28 | Patchtest checks patches which are expected to be applied to |
| 26 | contains the necessary prerequisites for a basic project. In addition, | 29 | Yocto layers. Therefore, familiarity with the Yocto Project, its |
| 27 | patchtest relies on the following Python modules: | 30 | functionality, and image-build processes is assumed. Otherwise the |
| 28 | 31 | [quick start guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html) | |
| 29 | - boto3 (for sending automated results emails only) | 32 | contains the necessary information. |
| 30 | - git-pw>=2.5.0 | 33 | |
| 31 | - jinja2 | 34 | As a Python application, the recommended way to install it (following |
| 32 | - pylint | 35 | Python best practices) is into a virtual environment. A |
| 33 | - pyparsing>=3.0.9 | 36 | Python virtual environment provides a convenient sandbox into which its |
| 34 | - unidiff | 37 | requirements can also be installed without affecting a user's entire |
| 35 | 38 | system at large. Patchtest makes use of a test suite found in oe-core, | |
| 36 | These can be installed by running `pip install -r | 39 | so it needs to be available as well. |
| 37 | meta/lib/patchtest/requirements.txt`. Note that git-pw is not | 40 | |
| 38 | automatically added to the user's PATH; by default, it is installed at | 41 | For example, to install patchtest into a Python virtual environment |
| 39 | ~/.local/bin/git-pw. | 42 | called "patchtest.venv" located at some/where in your filesystem: |
| 40 | 43 | ||
| 41 | For git-pw (and therefore scripts such as patchtest-get--series) to work, you need | 44 | $ mkdir -p some/where |
| 42 | to provide a Patchwork instance in your user's .gitconfig, like so (the project | 45 | $ cd some/where |
| 43 | can be specified using the --project argument): | 46 | $ mkdir yocto |
| 47 | $ pushd yocto | ||
| 48 | $ git clone https://git.openembedded.org/openembedded-core | ||
| 49 | $ git clone https://git.openembedded.org/bitbake | ||
| 50 | $ git clone https://git.yoctoproject.org/meta-patchtest | ||
| 51 | $ popd | ||
| 52 | $ . yocto/openembedded-core/oe-init-build-env build yocto/bitbake | ||
| 53 | $ cd .. | ||
| 54 | $ python3 -m venv patchtest.venv | ||
| 55 | $ . patchtest.venv/bin/activate | ||
| 56 | (patchtest.venv) $ pip install -r yocto/openembedded-core/meta/lib/patchtest/requirements.txt | ||
| 57 | |||
| 58 | In the above list of cloned repositories, the meta-patchtest layer is | ||
| 59 | only needed if you intend to use patchtest in "guest" mode. Also the | ||
| 60 | oe-core + bitbake clones can be replaced with poky instead. | ||
| 61 | |||
| 62 | If "guest" mode will be used, the meta-patchtest layer needs to be added | ||
| 63 | to the conf/bblayers.conf file generated above. | ||
| 64 | |||
| 65 | If you would like to run the patchtest selftest found in oe-core, the | ||
| 66 | openembedded-core/meta-selftest (or poky/meta-selftest, if using poky) | ||
| 67 | layer also needs to be added to bblayers.conf. | ||
| 68 | |||
| 69 | Once the installation is done, your directory layout will look like: | ||
| 70 | . | ||
| 71 | ├── build | ||
| 72 | │   └── conf | ||
| 73 | ├── yocto | ||
| 74 | │   ├── bitbake | ||
| 75 | │   ├── meta-patchtest | ||
| 76 | │   └── openembedded-core | ||
| 77 | └── patchtest.venv | ||
| 78 |     ├── bin | ||
| 79 |     ├── include | ||
| 80 |     ├── lib | ||
| 81 |     ├── lib64 -> lib | ||
| 82 |     ├── pyvenv.cfg | ||
| 83 |     └── share | ||
| 84 | |||
| 85 | For git-pw (and therefore scripts such as patchtest-get-series) to work, you need | ||
| 86 | to provide a Patchwork instance in your user's .gitconfig, like so (alternatively | ||
| 87 | the project can be specified using the --project argument to git-pw on its | ||
| 88 | cmdline): | ||
| 44 | 89 | ||
| 45 | git config --global pw.server "https://patchwork.yoctoproject.org/api/1.2/" | 90 | git config --global pw.server "https://patchwork.yoctoproject.org/api/1.2/" |
| 46 | 91 | ||
| 47 | To work with patchtest, you should have the following repositories cloned: | ||
| 48 | |||
| 49 | 1. https://git.openembedded.org/openembedded-core/ (or https://git.yoctoproject.org/poky/) | ||
| 50 | 2. https://git.openembedded.org/bitbake/ (if not using poky) | ||
| 51 | 3. https://git.yoctoproject.org/patchtest (if using guest mode) | ||
| 52 | |||
| 53 | ## Usage | 92 | ## Usage |
| 54 | 93 | ||
| 55 | ### Obtaining Patches | 94 | ### Obtaining Patches |
| 56 | 95 | ||
| 57 | Patch files can be obtained directly from cloned repositories using `git | 96 | Separate, mbox-formatted patch files can be obtained in a number of |
| 58 | format-patch -N` (where N is the number of patches starting from HEAD to | 97 | ways: |
| 59 | generate). git-pw can also be used with filters for users, patch/series IDs, | 98 | |
| 60 | and timeboxes if specific patches are desired. For more information, see the | 99 | - using b4 to obtain patches from a lore server |
| 61 | git-pw [documentation](https://patchwork.readthedocs.io/projects/git-pw/en/latest/). | 100 | - using git-pw to obtain patches from a patchwork server |
| 101 | - using "git format-patch ..." to create patches from a git | ||
| 102 | repository | ||
| 103 | - using an email program, such as mutt or thunderbird, to obtain | ||
| 104 | patches from a mailing list | ||
| 62 | 105 | ||
| 63 | Alternatively, `scripts/patchtest-get-series` can be used to pull mbox files from | 106 | Alternatively, `scripts/patchtest-get-series` can be used to pull mbox files from |
| 64 | the Patchwork instance configured previously in .gitconfig. It uses a log file | 107 | the Patchwork instance configured previously in .gitconfig. It uses a log file |
| @@ -69,29 +112,111 @@ the target project, but these parameters can be configured using the `--limit`, | |||
| 69 | `--interval`, and `--project` arguments respectively. For more information, run | 112 | `--interval`, and `--project` arguments respectively. For more information, run |
| 70 | `patchtest-get-series -h`. | 113 | `patchtest-get-series -h`. |
| 71 | 114 | ||
| 115 | #### git-pw | ||
| 116 | |||
| 117 | git-pw can be used with filters for users, patch/series IDs, and | ||
| 118 | timeboxes if specific patches are desired. For more information, see the | ||
| 119 | git-pw [documentation](https://patchwork.readthedocs.io/projects/git-pw/en/latest/). | ||
| 120 | |||
| 121 | For example, to download a single patch from the Yocto Project's | ||
| 122 | Patchwork server (and to demonstrate not having modified ~/.gitconfig): | ||
| 123 | |||
| 124 | (patchtest.venv) $ mkdir gawk | ||
| 125 | (patchtest.venv) $ git-pw --server https://patchwork.yoctoproject.org/api/1.2/ --project oe-core patch download --mbox 70101 gawk/ | ||
| 126 | |||
| 127 | To download a series, for example, try: | ||
| 128 | |||
| 129 | (patchtest.venv) $ mkdir clang | ||
| 130 | (patchtest.venv) $ git-pw --server https://patchwork.yoctoproject.org/api/1.2 --project oe-core series download --separate 38107 clang/ | ||
| 131 | |||
| 132 | #### git format-patch | ||
| 133 | |||
| 134 | Patch files can be obtained directly from a git repository using `git | ||
| 135 | format-patch -N` (where N is the number of patches starting from HEAD to | ||
| 136 | generate) or using any other way of specifying a range of commit SHAs to | ||
| 137 | git. | ||
| 138 | |||
| 139 | This method would be the most likely used when testing patches in local | ||
| 140 | mode before emailing them for review. | ||
| 141 | |||
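For example (the patch count and output directory are illustrative), to test the three most recent commits on the current branch:

    (patchtest.venv) $ git format-patch -3 -o /tmp/my-patches
    (patchtest.venv) $ patchtest --directory /tmp/my-patches
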
| 142 | #### b4 | ||
| 143 | |||
| 144 | In order to use b4, it needs to be installed. Fortunately it is a Python | ||
| 145 | program that is hosted on pypi and can easily be installed into the same | ||
| 146 | Python virtual environment that was created to run patchtest: | ||
| 147 | |||
| 148 | (patchtest.venv) $ pip install b4 | ||
| 149 | (patchtest.venv) $ b4 --version | ||
| 150 | 0.14.2 | ||
| 151 | |||
| 152 | To fetch the same single patch using b4 that was fetched with git-pw | ||
| 153 | earlier, use: | ||
| 154 | |||
| 155 | (patchtest.venv) $ mkdir gawk-2 | ||
| 156 | (patchtest.venv) $ b4 am -o gawk-2 https://lore.kernel.org/openembedded-core/20250912200740.2873851-1-Randy.MacLeod@windriver.com | ||
| 157 | |||
| 158 | Fetching a patch series with b4 is a little more involved since b4 will | ||
| 159 | create one mbox file with all the patches in the series in it. Given an | ||
| 160 | mbox file with more than one patch in it, patchtest will only test the | ||
| 161 | first one. So there needs to be a separate step to break apart the | ||
| 162 | multiple patches into separate files: | ||
| 163 | |||
| 164 | (patchtest.venv) $ mkdir clang-2 | ||
| 165 | (patchtest.venv) $ b4 am -o clang-2 https://lore.kernel.org/openembedded-core/20250914133258.2625735-1-ross.burton@arm.com | ||
| 166 | (patchtest.venv) $ cat clang-2/v2_20250914_ross_burton_clang_improve_opt_viewer_packageconfig.mbx | formail -ds sh -c 'cat > clang-2/msg.$FILENO' | ||
| 167 | |||
| 168 | NOTE: the formail utility is part of the procmail package for most Linux | ||
| 169 | distributions. | ||
| 170 | |||
| 171 | #### mail user agents (MUA) | ||
| 172 | |||
| 173 | Most email applications have a way of saving patch emails. Details for | ||
| 174 | each MUA are beyond the scope of this document, but it is possible in | ||
| 175 | most cases. The only catch is that each patch has to be saved in mbox | ||
| 176 | format in its own individual file. Some client applications prefer to | ||
| 177 | save emails in the Maildir format, and some programs will save a set of | ||
| 178 | patches into one mbox file. The formail program from the procmail | ||
| 179 | package is useful for manipulating messages and converting between | ||
| 180 | mail storage formats. | ||
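As a sketch (the file names are hypothetical), formail can split a multi-patch mbox saved from an MUA into the one-message-per-file layout that patchtest expects:

    (patchtest.venv) $ mkdir split
    (patchtest.venv) $ formail -ds sh -c 'cat > split/msg.$FILENO' < saved-from-mua.mbox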
| 181 | |||
| 72 | ### Host Mode | 182 | ### Host Mode |
| 73 | 183 | ||
| 74 | To run patchtest on the host, do the following: | 184 | To run patchtest in "host" mode, do the following: |
| 185 | |||
| 186 | 1. Using openembedded-core or poky, do the following if re-using the | ||
| 187 | installation given in the installation procedure above: | ||
| 188 | |||
| 189 | $ . yocto/openembedded-core/oe-init-build-env build yocto/bitbake | ||
| 190 | |||
| 191 | or | ||
| 75 | 192 | ||
| 76 | 1. In openembedded-core/poky, do `source oe-init-build-env` | 193 | $ . yocto/poky/oe-init-build-env |
| 77 | 2. Generate patch files from the target repository by doing `git-format patch -N`, | ||
| 78 | where N is the number of patches starting at HEAD, or by using git-pw | ||
| 79 | or patchtest-get-series | ||
| 80 | 3. Run patchtest on a patch file by doing the following: | ||
| 81 | 194 | ||
| 82 | patchtest --patch /path/to/patch/file | 195 | 2. Activate the Python virtual environment that was created using the |
| 196 | steps from the installation procedure given above: | ||
| 197 | |||
| 198 | $ . patchtest.venv/bin/activate | ||
| 199 | |||
| 200 | 3. Obtain one or more patches (see section above) | ||
| 201 | |||
| 202 | 4. Run patchtest on a patch file by doing the following: | ||
| 203 | |||
| 204 | (patchtest.venv) $ patchtest --patch /path/to/patch/file | ||
| 83 | 205 | ||
| 84 | or, if you have stored the patch files in a directory, do: | 206 | or, if you have stored the patch files in a directory, do: |
| 85 | 207 | ||
| 86 | patchtest --directory /path/to/patch/directory | 208 | (patchtest.venv) $ patchtest --directory /path/to/patch/directory |
| 87 | 209 | ||
| 88 | For example, to test `master-gcc-Fix--fstack-protector-issue-on-aarch64.patch` against the oe-core test suite: | 210 | For example, to test |
| 211 | `master-gcc-Fix--fstack-protector-issue-on-aarch64.patch` against the | ||
| 212 | oe-core test suite: | ||
| 89 | 213 | ||
| 90 | patchtest --patch master-gcc-Fix--fstack-protector-issue-on-aarch64.patch | 214 | (patchtest.venv) $ patchtest --patch master-gcc-Fix--fstack-protector-issue-on-aarch64.patch |
| 91 | 215 | ||
| 92 | If you want to use a different test suite or target repository, you can use the --testdir and --repodir flags: | 216 | If you want to use a different test suite or target repository, you |
| 217 | can use the --testdir and --repodir flags: | ||
| 93 | 218 | ||
| 94 | patchtest --patch /path/to/patch/file --repodir /path/to/repo --testdir /path/to/test/dir | 219 | (patchtest.venv) $ patchtest --patch /path/to/patch/file --repodir /path/to/repo --testdir /path/to/test/dir |
| 95 | 220 | ||
| 96 | ### Guest Mode | 221 | ### Guest Mode |
| 97 | 222 | ||
| @@ -103,8 +228,8 @@ from the mailing lists. When executed this way, the test process is | |||
| 103 | essentially running random code from the internet and could be | 228 | essentially running random code from the internet and could be |
| 104 | catastrophic if malicious bits or even poorly-handled edge cases aren't | 229 | catastrophic if malicious bits or even poorly-handled edge cases aren't |
| 105 | protected against. In order to use this mode, the | 230 | protected against. In order to use this mode, the |
| 106 | https://git.yoctoproject.org/patchtest/ repository must be cloned and | 231 | https://git.yoctoproject.org/meta-patchtest/ repository must be cloned |
| 107 | the meta-patchtest layer added to bblayers.conf. | 232 | and added to bblayers.conf. |
| 108 | 233 | ||
| 109 | The general flow of guest mode is: | 234 | The general flow of guest mode is: |
| 110 | 235 | ||
| @@ -123,7 +248,7 @@ The general flow of guest mode is: | |||
| 123 | -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m | 248 | -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m |
| 124 | 2048"` | 249 | 2048"` |
| 125 | 250 | ||
| 126 | Patchtest runs as an initscript for the core-image-patchtest image and | 251 | Patchtest is run by an initscript for the core-image-patchtest image and |
| 127 | shuts down after completion, so there is no input required from a user | 252 | shuts down after completion, so there is no input required from a user |
| 128 | during operation. Unlike in host mode, the guest is designed to | 253 | during operation. Unlike in host mode, the guest is designed to |
| 129 | automatically generate test result files, in the same directory as the | 254 | automatically generate test result files, in the same directory as the |
| @@ -131,6 +256,29 @@ targeted patch files but with .testresult as an extension. These contain | |||
| 131 | the entire output of the patchtest run for each respective pass, | 256 | the entire output of the patchtest run for each respective pass, |
| 132 | including the PASS, FAIL, and SKIP indicators for each test run. | 257 | including the PASS, FAIL, and SKIP indicators for each test run. |
| 133 | 258 | ||
| 259 | ### Running Patchtest Selftests | ||
| 260 | |||
| 261 | Patchtest also includes selftests, which are currently in the form of | ||
| 262 | several contrived patch files and a runner script found in | ||
| 263 | `meta/lib/patchtest/selftest/`. In order to run these, the | ||
| 264 | `meta-selftest` layer must be added to bblayers.conf. It is also | ||
| 265 | recommended to set BB_SERVER_TIMEOUT (and thus enable memory-resident | ||
| 266 | bitbake) in local.conf to reduce runtime, as the bitbake startup process | ||
| 267 | will otherwise add to it significantly when restarted for each test | ||
| 268 | patch. | ||
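For example (the timeout value here is arbitrary), memory-resident bitbake can be enabled by appending a non-zero timeout to the build's local.conf:

    $ echo 'BB_SERVER_TIMEOUT = "60"' >> build/conf/local.conf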
| 269 | |||
| 270 | If you have set up a Python virtual environment and sourced the | ||
| 271 | oe-init-build-env script to run patchtest following the directions | ||
| 272 | provided above in the "Installation" section, and you have added the | ||
| 273 | meta-selftest layer (from oe-core or poky) to your build, running the | ||
| 274 | patchtest selftest is as simple as: | ||
| 275 | |||
| 276 | (patchtest.venv) $ yocto/openembedded-core/meta/lib/patchtest/selftest/selftest | ||
| 277 | |||
| 278 | or: | ||
| 279 | |||
| 280 | (patchtest.venv) $ yocto/poky/meta/lib/patchtest/selftest/selftest | ||
| 281 | |||
| 134 | ## Contributing | 282 | ## Contributing |
| 135 | 283 | ||
| 136 | The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions, | 284 | The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions, |
diff --git a/scripts/pull-sdpx-licenses.py b/scripts/pull-sdpx-licenses.py new file mode 100755 index 0000000000..597a62133f --- /dev/null +++ b/scripts/pull-sdpx-licenses.py | |||
| @@ -0,0 +1,101 @@ | |||
| 1 | #! /usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Copyright OpenEmbedded Contributors | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | |||
| 7 | import argparse | ||
| 8 | import json | ||
| 9 | import sys | ||
| 10 | import urllib.request | ||
| 11 | from pathlib import Path | ||
| 12 | |||
| 13 | TOP_DIR = Path(__file__).parent.parent | ||
| 14 | |||
| 15 | |||
| 16 | def main(): | ||
| 17 | parser = argparse.ArgumentParser( | ||
| 18 | description="Update SPDX License files from upstream" | ||
| 19 | ) | ||
| 20 | parser.add_argument( | ||
| 21 | "-v", | ||
| 22 | "--version", | ||
| 23 | metavar="MAJOR.MINOR[.MICRO]", | ||
| 24 | help="Pull specific version of License list instead of latest", | ||
| 25 | ) | ||
| 26 | parser.add_argument( | ||
| 27 | "--overwrite", | ||
| 28 | action="store_true", | ||
| 29 | help="Update existing license file text with upstream text", | ||
| 30 | ) | ||
| 31 | parser.add_argument( | ||
| 32 | "--deprecated", | ||
| 33 | action="store_true", | ||
| 34 | help="Update deprecated licenses", | ||
| 35 | ) | ||
| 36 | parser.add_argument( | ||
| 37 | "--dest", | ||
| 38 | type=Path, | ||
| 39 | default=TOP_DIR / "meta" / "files" / "common-licenses", | ||
| 40 | help="Write licenses to directory DEST. Default is %(default)s", | ||
| 41 | ) | ||
| 42 | |||
| 43 | args = parser.parse_args() | ||
| 44 | |||
| 45 | if args.version: | ||
| 46 | version = f"v{args.version}" | ||
| 47 | else: | ||
| 48 | # Fetch the latest release | ||
| 49 | req = urllib.request.Request( | ||
| 50 | "https://api.github.com/repos/spdx/license-list-data/releases/latest" | ||
| 51 | ) | ||
| 52 | req.add_header("X-GitHub-Api-Version", "2022-11-28") | ||
| 53 | req.add_header("Accept", "application/vnd.github+json") | ||
| 54 | with urllib.request.urlopen(req) as response: | ||
| 55 | data = json.load(response) | ||
| 56 | version = data["tag_name"] | ||
| 57 | |||
| 58 | print(f"Pulling SPDX license list version {version}") | ||
| 59 | req = urllib.request.Request( | ||
| 60 | f"https://raw.githubusercontent.com/spdx/license-list-data/{version}/json/licenses.json" | ||
| 61 | ) | ||
| 62 | with urllib.request.urlopen(req) as response: | ||
| 63 | spdx_licenses = json.load(response) | ||
| 64 | |||
| 65 | with (TOP_DIR / "meta" / "files" / "spdx-licenses.json").open("w") as f: | ||
| 66 | json.dump(spdx_licenses, f, sort_keys=True, indent=2) | ||
| 67 | |||
| 68 | total_count = len(spdx_licenses["licenses"]) | ||
| 69 | updated = 0 | ||
| 70 | for idx, lic in enumerate(spdx_licenses["licenses"]): | ||
| 71 | lic_id = lic["licenseId"] | ||
| 72 | |||
| 73 | print(f"[{idx + 1} of {total_count}] ", end="") | ||
| 74 | |||
| 75 | dest_license_file = args.dest / lic_id | ||
| 76 | if dest_license_file.is_file() and not args.overwrite: | ||
| 77 | print(f"Skipping {lic_id} since it already exists") | ||
| 78 | continue | ||
| 79 | |||
| 80 | print(f"Fetching {lic_id}... ", end="", flush=True) | ||
| 81 | |||
| 82 | req = urllib.request.Request(lic["detailsUrl"]) | ||
| 83 | with urllib.request.urlopen(req) as response: | ||
| 84 | lic_data = json.load(response) | ||
| 85 | |||
| 86 | if lic_data["isDeprecatedLicenseId"] and not args.deprecated: | ||
| 87 | print("Skipping (deprecated)") | ||
| 88 | continue | ||
| 89 | |||
| 90 | with dest_license_file.open("w") as f: | ||
| 91 | f.write(lic_data["licenseText"]) | ||
| 92 | updated += 1 | ||
| 93 | print("done") | ||
| 94 | |||
| 95 | print(f"Updated {updated} licenses") | ||
| 96 | |||
| 97 | return 0 | ||
| 98 | |||
| 99 | |||
| 100 | if __name__ == "__main__": | ||
| 101 | sys.exit(main()) | ||
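A usage sketch for the script above, based on its argparse options (the pinned version number is only an example):

    # pull the latest license-list release, skipping files that already exist
    $ scripts/pull-sdpx-licenses.py
    # pin a specific release and overwrite existing license texts
    $ scripts/pull-sdpx-licenses.py --version 3.24 --overwrite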
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py index c6e67833ab..4d76ce6e8e 100644 --- a/scripts/pybootchartgui/pybootchartgui/draw.py +++ b/scripts/pybootchartgui/pybootchartgui/draw.py | |||
| @@ -69,6 +69,11 @@ CPU_COLOR = (0.40, 0.55, 0.70, 1.0) | |||
| 69 | IO_COLOR = (0.76, 0.48, 0.48, 0.5) | 69 | IO_COLOR = (0.76, 0.48, 0.48, 0.5) |
| 70 | # Disk throughput color. | 70 | # Disk throughput color. |
| 71 | DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0) | 71 | DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0) |
| 72 | |||
| 73 | BYTES_RECEIVED_COLOR = (0.0, 0.0, 1.0, 1.0) | ||
| 74 | BYTES_TRANSMITTED_COLOR = (1.0, 0.0, 0.0, 1.0) | ||
| 75 | BYTES_RECEIVE_DIFF_COLOR = (0.0, 0.0, 1.0, 0.3) | ||
| 76 | BYTES_TRANSMIT_DIFF_COLOR = (1.0, 0.0, 0.0, 0.3) | ||
| 72 | # CPU load chart color. | 77 | # CPU load chart color. |
| 73 | FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0) | 78 | FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0) |
| 74 | # Mem cached color | 79 | # Mem cached color |
| @@ -366,6 +371,8 @@ def extents(options, xscale, trace): | |||
| 366 | h += 30 + bar_h | 371 | h += 30 + bar_h |
| 367 | if trace.mem_stats: | 372 | if trace.mem_stats: |
| 368 | h += meminfo_bar_h | 373 | h += meminfo_bar_h |
| 374 | if trace.net_stats: | ||
| 375 | h += (30 + bar_h) * len(trace.net_stats) | ||
| 369 | 376 | ||
| 370 | # Allow for width of process legend and offset | 377 | # Allow for width of process legend and offset |
| 371 | if w < (720 + off_x): | 378 | if w < (720 + off_x): |
| @@ -437,6 +444,49 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w): | |||
| 437 | 444 | ||
| 438 | curr_y = curr_y + 30 + bar_h | 445 | curr_y = curr_y + 30 + bar_h |
| 439 | 446 | ||
| 447 | if trace.net_stats: | ||
| 448 | for iface, samples in trace.net_stats.items(): | ||
| 449 | max_received_sample = max(samples, key=lambda s: s.received_bytes) | ||
| 450 | max_transmitted_sample = max(samples, key=lambda s: s.transmitted_bytes) | ||
| 451 | max_receive_diff_sample = max(samples, key=lambda s: s.receive_diff) | ||
| 452 | max_transmit_diff_sample = max(samples, key=lambda s: s.transmit_diff) | ||
| 453 | |||
| 454 | draw_text(ctx, "Iface: %s" % (iface), TEXT_COLOR, off_x, curr_y+20) | ||
| 455 | draw_legend_line(ctx, "Bytes received (max %d)" % (max_received_sample.received_bytes), | ||
| 456 | BYTES_RECEIVED_COLOR, off_x+150, curr_y+20, leg_s) | ||
| 457 | draw_legend_line(ctx, "Bytes transmitted (max %d)" % (max_transmitted_sample.transmitted_bytes), | ||
| 458 | BYTES_TRANSMITTED_COLOR, off_x+400, curr_y+20, leg_s) | ||
| 459 | draw_legend_box(ctx, "Bytes receive diff (max %d)" % (max_receive_diff_sample.receive_diff), | ||
| 460 | BYTES_RECEIVE_DIFF_COLOR, off_x+650, curr_y+20, leg_s) | ||
| 461 | draw_legend_box(ctx, "Bytes transmit diff (max %d)" % (max_transmit_diff_sample.transmit_diff), | ||
| 462 | BYTES_TRANSMIT_DIFF_COLOR, off_x+900, curr_y+20, leg_s) | ||
| 463 | |||
| 464 | |||
| 465 | chart_rect = (off_x, curr_y + 30, w, bar_h) | ||
| 466 | if clip_visible(clip, chart_rect): | ||
| 467 | draw_box_ticks(ctx, chart_rect, sec_w) | ||
| 468 | draw_annotations(ctx, proc_tree, trace.times, chart_rect) | ||
| 469 | |||
| 470 | if clip_visible (clip, chart_rect): | ||
| 471 | draw_chart (ctx, BYTES_RECEIVED_COLOR, False, chart_rect, \ | ||
| 472 | [(sample.time, sample.received_bytes) for sample in samples], \ | ||
| 473 | proc_tree, None) | ||
| 474 | |||
| 475 | draw_chart (ctx, BYTES_TRANSMITTED_COLOR, False, chart_rect, \ | ||
| 476 | [(sample.time, sample.transmitted_bytes) for sample in samples], \ | ||
| 477 | proc_tree, None) | ||
| 478 | |||
| 479 | if clip_visible (clip, chart_rect): | ||
| 480 | draw_chart (ctx, BYTES_RECEIVE_DIFF_COLOR, True, chart_rect, \ | ||
| 481 | [(sample.time, sample.receive_diff) for sample in samples], \ | ||
| 482 | proc_tree, None) | ||
| 483 | |||
| 484 | draw_chart (ctx, BYTES_TRANSMIT_DIFF_COLOR, True, chart_rect, \ | ||
| 485 | [(sample.time, sample.transmit_diff) for sample in samples], \ | ||
| 486 | proc_tree, None) | ||
| 487 | |||
| 488 | curr_y = curr_y + 30 + bar_h | ||
| 489 | |||
| 440 | # render CPU pressure chart | 490 | # render CPU pressure chart |
| 441 | if trace.cpu_pressure: | 491 | if trace.cpu_pressure: |
| 442 | max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10) | 492 | max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10) |
| @@ -796,7 +846,7 @@ def draw_header (ctx, headers, duration): | |||
| 796 | toshow = [ | 846 | toshow = [ |
| 797 | ('system.uname', 'uname', lambda s: s), | 847 | ('system.uname', 'uname', lambda s: s), |
| 798 | ('system.release', 'release', lambda s: s), | 848 | ('system.release', 'release', lambda s: s), |
| 799 | ('system.cpu', 'CPU', lambda s: re.sub('model name\s*:\s*', '', s, 1)), | 849 | ('system.cpu', 'CPU', lambda s: re.sub(r'model name\s*:\s*', '', s, 1)), |
| 800 | ('system.kernel.options', 'kernel options', lambda s: s), | 850 | ('system.kernel.options', 'kernel options', lambda s: s), |
| 801 | ] | 851 | ] |
| 802 | 852 | ||
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py index 63a53b6b88..72a54c6ba5 100644 --- a/scripts/pybootchartgui/pybootchartgui/parsing.py +++ b/scripts/pybootchartgui/pybootchartgui/parsing.py | |||
| @@ -48,6 +48,7 @@ class Trace: | |||
| 48 | self.filename = None | 48 | self.filename = None |
| 49 | self.parent_map = None | 49 | self.parent_map = None |
| 50 | self.mem_stats = [] | 50 | self.mem_stats = [] |
| 51 | self.net_stats = {} | ||
| 51 | self.monitor_disk = None | 52 | self.monitor_disk = None |
| 52 | self.cpu_pressure = [] | 53 | self.cpu_pressure = [] |
| 53 | self.io_pressure = [] | 54 | self.io_pressure = [] |
| @@ -457,7 +458,7 @@ def _parse_proc_disk_stat_log(file): | |||
| 457 | not sda1, sda2 etc. The format of relevant lines should be: | 458 | not sda1, sda2 etc. The format of relevant lines should be: |
| 458 | {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq} | 459 | {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq} |
| 459 | """ | 460 | """ |
| 460 | disk_regex_re = re.compile ('^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') | 461 | disk_regex_re = re.compile (r'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') |
| 461 | 462 | ||
| 462 | # this gets called an awful lot. | 463 | # this gets called an awful lot. |
| 463 | def is_relevant_line(linetokens): | 464 | def is_relevant_line(linetokens): |
| @@ -557,6 +558,21 @@ def _parse_monitor_disk_log(file): | |||
| 557 | 558 | ||
| 558 | return disk_stats | 559 | return disk_stats |
| 559 | 560 | ||
| 561 | |||
| 562 | def _parse_reduced_net_log(file): | ||
| 563 | net_stats = {} | ||
| 564 | for time, lines in _parse_timed_blocks(file): | ||
| 565 | |||
| 566 | for line in lines: | ||
| 567 | parts = line.split() | ||
| 568 | iface = parts[0][:-1] | ||
| 569 | sample = NetSample(time, iface, int(parts[1]), int(parts[2]), | ||
| 570 | int(parts[3]), int(parts[4])) | ||
| 571 | # collect samples per interface | ||
| 572 | net_stats.setdefault(iface, []).append(sample) | ||
| 573 | return net_stats | ||
| 574 | |||
| 575 | |||
| 560 | def _parse_pressure_logs(file, filename): | 576 | def _parse_pressure_logs(file, filename): |
| 561 | """ | 577 | """ |
| 562 | Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values | 578 | Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values |
| @@ -594,8 +610,8 @@ def _parse_pressure_logs(file, filename): | |||
| 594 | # [ 0.039993] calling migration_init+0x0/0x6b @ 1 | 610 | # [ 0.039993] calling migration_init+0x0/0x6b @ 1 |
| 595 | # [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs | 611 | # [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs |
| 596 | def _parse_dmesg(writer, file): | 612 | def _parse_dmesg(writer, file): |
| 597 | timestamp_re = re.compile ("^\[\s*(\d+\.\d+)\s*]\s+(.*)$") | 613 | timestamp_re = re.compile (r"^\[\s*(\d+\.\d+)\s*]\s+(.*)$") |
| 598 | split_re = re.compile ("^(\S+)\s+([\S\+_-]+) (.*)$") | 614 | split_re = re.compile (r"^(\S+)\s+([\S\+_-]+) (.*)$") |
| 599 | processMap = {} | 615 | processMap = {} |
| 600 | idx = 0 | 616 | idx = 0 |
| 601 | inc = 1.0 / 1000000 | 617 | inc = 1.0 / 1000000 |
| @@ -640,7 +656,7 @@ def _parse_dmesg(writer, file): | |||
| 640 | # print "foo: '%s' '%s' '%s'" % (type, func, rest) | 656 | # print "foo: '%s' '%s' '%s'" % (type, func, rest) |
| 641 | if type == "calling": | 657 | if type == "calling": |
| 642 | ppid = kernel.pid | 658 | ppid = kernel.pid |
| 643 | p = re.match ("\@ (\d+)", rest) | 659 | p = re.match (r"\@ (\d+)", rest) |
| 644 | if p is not None: | 660 | if p is not None: |
| 645 | ppid = float (p.group(1)) // 1000 | 661 | ppid = float (p.group(1)) // 1000 |
| 646 | # print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms) | 662 | # print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms) |
| @@ -742,7 +758,7 @@ def get_num_cpus(headers): | |||
| 742 | cpu_model = headers.get("system.cpu") | 758 | cpu_model = headers.get("system.cpu") |
| 743 | if cpu_model is None: | 759 | if cpu_model is None: |
| 744 | return 1 | 760 | return 1 |
| 745 | mat = re.match(".*\\((\\d+)\\)", cpu_model) | 761 | mat = re.match(r".*\((\d+)\)", cpu_model) |
| 746 | if mat is None: | 762 | if mat is None: |
| 747 | return 1 | 763 | return 1 |
| 748 | return max (int(mat.group(1)), 1) | 764 | return max (int(mat.group(1)), 1) |
| @@ -767,6 +783,8 @@ def _do_parse(writer, state, filename, file): | |||
| 767 | state.cmdline = _parse_cmdline_log(writer, file) | 783 | state.cmdline = _parse_cmdline_log(writer, file) |
| 768 | elif name == "monitor_disk.log": | 784 | elif name == "monitor_disk.log": |
| 769 | state.monitor_disk = _parse_monitor_disk_log(file) | 785 | state.monitor_disk = _parse_monitor_disk_log(file) |
| 786 | elif name == "reduced_proc_net.log": | ||
| 787 | state.net_stats = _parse_reduced_net_log(file) | ||
| 770 | #pressure logs are in a subdirectory | 788 | #pressure logs are in a subdirectory |
| 771 | elif name == "cpu.log": | 789 | elif name == "cpu.log": |
| 772 | state.cpu_pressure = _parse_pressure_logs(file, name) | 790 | state.cpu_pressure = _parse_pressure_logs(file, name) |
diff --git a/scripts/pybootchartgui/pybootchartgui/samples.py b/scripts/pybootchartgui/pybootchartgui/samples.py index a70d8a5a28..7c92d2ce6a 100644 --- a/scripts/pybootchartgui/pybootchartgui/samples.py +++ b/scripts/pybootchartgui/pybootchartgui/samples.py | |||
| @@ -37,6 +37,16 @@ class CPUSample: | |||
| 37 | return str(self.time) + "\t" + str(self.user) + "\t" + \ | 37 | return str(self.time) + "\t" + str(self.user) + "\t" + \ |
| 38 | str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) | 38 | str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) |
| 39 | 39 | ||
| 40 | |||
| 41 | class NetSample: | ||
| 42 | def __init__(self, time, iface, received_bytes, transmitted_bytes, receive_diff, transmit_diff): | ||
| 43 | self.time = time | ||
| 44 | self.iface = iface | ||
| 45 | self.received_bytes = received_bytes | ||
| 46 | self.transmitted_bytes = transmitted_bytes | ||
| 47 | self.receive_diff = receive_diff | ||
| 48 | self.transmit_diff = transmit_diff | ||
| 49 | |||
| 40 | class CPUPressureSample: | 50 | class CPUPressureSample: |
| 41 | def __init__(self, time, avg10, avg60, avg300, deltaTotal): | 51 | def __init__(self, time, avg10, avg60, avg300, deltaTotal): |
| 42 | self.time = time | 52 | self.time = time |
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py index 8a728720ba..9e01c09cb0 100755 --- a/scripts/relocate_sdk.py +++ b/scripts/relocate_sdk.py | |||
| @@ -49,6 +49,34 @@ def get_arch(): | |||
| 49 | elif ei_class == 2: | 49 | elif ei_class == 2: |
| 50 | return 64 | 50 | return 64 |
| 51 | 51 | ||
| 52 | def get_dl_arch(dl_path): | ||
| 53 | try: | ||
| 54 | with open(dl_path, "r+b") as f: | ||
| 55 | e_ident = f.read(16) | ||
| 56 | except IOError: | ||
| 57 | exctype, ioex = sys.exc_info()[:2] | ||
| 58 | if ioex.errno == errno.ETXTBSY: | ||
| 59 | print("Could not open %s. File used by another process.\nPlease "\ | ||
| 60 | "make sure you exit all processes that might use any SDK "\ | ||
| 61 | "binaries." % e) | ||
| 62 | else: | ||
| 63 | print("Could not open %s: %s(%d)" % (e, ioex.strerror, ioex.errno)) | ||
| 64 | sys.exit(-1) | ||
| 65 | |||
| 66 | ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident) | ||
| 67 | |||
| 68 | if ei_mag0 != 0x7f or ei_mag1_3 != b"ELF" or ei_class == 0: | ||
| 69 | print("ERROR: unknown %s" % dl_path) | ||
| 70 | sys.exit(-1) | ||
| 71 | |||
| 72 | if ei_class == 1: | ||
| 73 | arch = 32 | ||
| 74 | elif ei_class == 2: | ||
| 75 | arch = 64 | ||
| 76 | |||
| 77 | return arch | ||
| 78 | |||
| 79 | |||
| 52 | def parse_elf_header(): | 80 | def parse_elf_header(): |
| 53 | global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ | 81 | global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ |
| 54 | e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx | 82 | e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx |
| @@ -223,6 +251,8 @@ else: | |||
| 223 | 251 | ||
| 224 | executables_list = sys.argv[3:] | 252 | executables_list = sys.argv[3:] |
| 225 | 253 | ||
| 254 | dl_arch = get_dl_arch(new_dl_path) | ||
| 255 | |||
| 226 | errors = False | 256 | errors = False |
| 227 | for e in executables_list: | 257 | for e in executables_list: |
| 228 | perms = os.stat(e)[stat.ST_MODE] | 258 | perms = os.stat(e)[stat.ST_MODE] |
| @@ -247,7 +277,7 @@ for e in executables_list: | |||
| 247 | old_size = os.path.getsize(e) | 277 | old_size = os.path.getsize(e) |
| 248 | if old_size >= 64: | 278 | if old_size >= 64: |
| 249 | arch = get_arch() | 279 | arch = get_arch() |
| 250 | if arch: | 280 | if arch and arch == dl_arch: |
| 251 | parse_elf_header() | 281 | parse_elf_header() |
| 252 | if not change_interpreter(e): | 282 | if not change_interpreter(e): |
| 253 | errors = True | 283 | errors = True |
diff --git a/scripts/resulttool b/scripts/resulttool index fc282bda6c..66a6af9959 100755 --- a/scripts/resulttool +++ b/scripts/resulttool | |||
| @@ -15,6 +15,9 @@ | |||
| 15 | # To report test report, execute the below | 15 | # To report test report, execute the below |
| 16 | # $ resulttool report <source_dir> | 16 | # $ resulttool report <source_dir> |
| 17 | # | 17 | # |
| 18 | # To create a unit test report in JUnit XML format, execute the below | ||
| 19 | # $ resulttool junit <json_file> | ||
| 20 | # | ||
| 18 | # To perform regression file analysis, execute the below | 21 | # To perform regression file analysis, execute the below |
| 19 | # $ resulttool regression-file <base_result_file> <target_result_file> | 22 | # $ resulttool regression-file <base_result_file> <target_result_file> |
| 20 | # | 23 | # |
| @@ -43,6 +46,7 @@ import resulttool.regression | |||
| 43 | import resulttool.report | 46 | import resulttool.report |
| 44 | import resulttool.manualexecution | 47 | import resulttool.manualexecution |
| 45 | import resulttool.log | 48 | import resulttool.log |
| 49 | import resulttool.junit | ||
| 46 | logger = scriptutils.logger_create('resulttool') | 50 | logger = scriptutils.logger_create('resulttool') |
| 47 | 51 | ||
| 48 | def main(): | 52 | def main(): |
| @@ -61,6 +65,7 @@ def main(): | |||
| 61 | resulttool.regression.register_commands(subparsers) | 65 | resulttool.regression.register_commands(subparsers) |
| 62 | resulttool.report.register_commands(subparsers) | 66 | resulttool.report.register_commands(subparsers) |
| 63 | resulttool.log.register_commands(subparsers) | 67 | resulttool.log.register_commands(subparsers) |
| 68 | resulttool.junit.register_commands(subparsers) | ||
| 64 | 69 | ||
| 65 | args = parser.parse_args() | 70 | args = parser.parse_args() |
| 66 | if args.debug: | 71 | if args.debug: |
diff --git a/scripts/runqemu b/scripts/runqemu index 69cd44864e..2be7a0f286 100755 --- a/scripts/runqemu +++ b/scripts/runqemu | |||
| @@ -197,11 +197,13 @@ class BaseConfig(object): | |||
| 197 | self.portlocks = {} | 197 | self.portlocks = {} |
| 198 | self.bitbake_e = '' | 198 | self.bitbake_e = '' |
| 199 | self.snapshot = False | 199 | self.snapshot = False |
| 200 | self.wictypes = ('wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx") | 200 | self.wictypes = ('wic.zst', 'wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx") |
| 201 | self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs', | 201 | self.fstypes = ('ext2', 'ext3', 'ext4', 'ext2.zst', 'ext3.zst', 'ext4.zst', |
| 202 | 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz', | 202 | 'jffs2', 'nfs', 'btrfs', 'cpio.gz', 'cpio', 'ramfs', |
| 203 | 'tar.bz2', 'tar.gz', 'tar.zst', | ||
| 203 | 'squashfs', 'squashfs-xz', 'squashfs-lzo', | 204 | 'squashfs', 'squashfs-xz', 'squashfs-lzo', |
| 204 | 'squashfs-lz4', 'squashfs-zst') | 205 | 'squashfs-lz4', 'squashfs-zst', |
| 206 | 'erofs', 'erofs-lz4', 'erofs-lz4hc') | ||
| 205 | self.vmtypes = ('hddimg', 'iso') | 207 | self.vmtypes = ('hddimg', 'iso') |
| 206 | self.fsinfo = {} | 208 | self.fsinfo = {} |
| 207 | self.network_device = "-device e1000,netdev=net0,mac=@MAC@" | 209 | self.network_device = "-device e1000,netdev=net0,mac=@MAC@" |
| @@ -368,19 +370,20 @@ class BaseConfig(object): | |||
| 368 | - Check whether it is an NFS dir | 370 | - Check whether it is an NFS dir |
| 369 | - Check whether it is an OVMF flash file | 371 | - Check whether it is an OVMF flash file |
| 370 | """ | 372 | """ |
| 373 | n = os.path.basename(p) | ||
| 371 | if p.endswith('.qemuboot.conf'): | 374 | if p.endswith('.qemuboot.conf'): |
| 372 | self.qemuboot = p | 375 | self.qemuboot = p |
| 373 | self.qbconfload = True | 376 | self.qbconfload = True |
| 374 | elif re.search('\\.bin$', p) or re.search('bzImage', p) or \ | 377 | elif re.search('\\.bin$', n) or re.search('bzImage', n) or \ |
| 375 | re.search('zImage', p) or re.search('vmlinux', p) or \ | 378 | re.search('zImage', n) or re.search('vmlinux', n) or \ |
| 376 | re.search('fitImage', p) or re.search('uImage', p): | 379 | re.search('fitImage', n) or re.search('uImage', n): |
| 377 | self.kernel = p | 380 | self.kernel = p |
| 378 | elif os.path.isfile(p) and ('-image-' in os.path.basename(p) or '.rootfs.' in os.path.basename(p)): | 381 | elif os.path.isfile(p) and ('-image-' in os.path.basename(p) or '.rootfs.' in os.path.basename(p)): |
| 379 | self.rootfs = p | 382 | self.rootfs = p |
| 380 | # Checking the filename against self.fstypes handles <file>.cpio.gz, | 383 | # Checking the filename against self.fstypes handles <file>.cpio.gz, |
| 381 | # otherwise, its type would be "gz", which is incorrect. | 384 | # otherwise, its type would be "gz", which is incorrect. |
| 382 | fst = "" | 385 | fst = "" |
| 383 | for t in self.fstypes: | 386 | for t in self.fstypes + self.vmtypes + self.wictypes: |
| 384 | if p.endswith(t): | 387 | if p.endswith(t): |
| 385 | fst = t | 388 | fst = t |
| 386 | break | 389 | break |
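The widened lookup above scans self.fstypes + self.vmtypes + self.wictypes with endswith() so multi-part suffixes resolve correctly. A self-contained sketch of the idea, using a trimmed stand-in type tuple:

    fstypes = ('ext4', 'ext4.zst', 'cpio.gz', 'cpio', 'wic', 'wic.zst')

    def detect_fstype(filename, types=fstypes):
        # Try known (possibly multi-part) suffixes instead of splitting on the
        # last dot, so "rootfs.cpio.gz" yields "cpio.gz" rather than "gz".
        for t in types:
            if filename.endswith(t):
                return t
        return ''

    assert detect_fstype('core-image.rootfs.cpio.gz') == 'cpio.gz'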
| @@ -418,6 +421,46 @@ class BaseConfig(object): | |||
| 418 | else: | 421 | else: |
| 419 | raise RunQemuError("Unknown path arg %s" % p) | 422 | raise RunQemuError("Unknown path arg %s" % p) |
| 420 | 423 | ||
| 424 | def uncompress_rootfs(self): | ||
| 425 | """Decompress ZST rootfs image if needed""" | ||
| 426 | if not self.rootfs or not self.fstype.endswith('.zst'): | ||
| 427 | return | ||
| 428 | |||
| 429 | # Ensure snapshot mode is active before allowing decompression. | ||
| 430 | if not self.snapshot: | ||
| 431 | raise RunQemuError(".zst images are only supported with snapshot mode. " \ | ||
| 432 | "You can either use the \"snapshot\" option or use an uncompressed image.") | ||
| 433 | |||
| 434 | # Get the real path to the image to avoid issues when a symbolic link is passed. | ||
| 435 | # This ensures we always operate on the actual file. | ||
| 436 | image_path = os.path.realpath(self.rootfs) | ||
| 437 | # Extract target filename by removing .zst | ||
| 438 | image_dir = os.path.dirname(image_path) | ||
| 439 | uncompressed_name = os.path.basename(image_path).replace(".zst", "") | ||
| 440 | uncompressed_path = os.path.join(image_dir, uncompressed_name) | ||
| 441 | |||
| 442 | # If the decompressed image already exists (e.g., in the deploy directory), | ||
| 443 | # we use it directly to avoid overwriting artifacts generated by the build system. | ||
| 444 | # This prevents redundant decompression and preserves build outputs. | ||
| 445 | if os.path.exists(uncompressed_path): | ||
| 446 | logger.warning(f"Found existing decompressed image: {uncompressed_path}, using it directly.") | ||
| 447 | else: | ||
| 448 | logger.info(f"Decompressing {self.rootfs} to {uncompressed_path}") | ||
| 449 | # Ensure the 'zstd' tool is installed before attempting to decompress. | ||
| 450 | if not shutil.which('zstd'): | ||
| 451 | raise RunQemuError(f"'zstd' is required to decompress {self.rootfs} but was not found in PATH") | ||
| 452 | try: | ||
| 453 | subprocess.check_call(['zstd', '-d', image_path, '-o', uncompressed_path]) | ||
| 454 | except subprocess.CalledProcessError as e: | ||
| 455 | raise RunQemuError(f"Failed to decompress {self.rootfs}: {e}") | ||
| 456 | finally: | ||
| 457 | # Mark temporary file for deletion | ||
| 458 | self.cleanup_files.append(uncompressed_path) | ||
| 459 | |||
| 460 | # Use the decompressed image as the rootfs | ||
| 461 | self.rootfs = uncompressed_path | ||
| 462 | self.fstype = self.fstype.removesuffix(".zst") | ||
| 463 | |||
| 421 | def check_arg_machine(self, arg): | 464 | def check_arg_machine(self, arg): |
| 422 | """Check whether it is a machine""" | 465 | """Check whether it is a machine""" |
| 423 | if self.get('MACHINE') == arg: | 466 | if self.get('MACHINE') == arg: |
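The new uncompress_rootfs() shells out to zstd and insists on snapshot mode because the decompressed copy is queued for deletion on exit, so writes must never target it directly. The decompress-if-needed core reduces to something like this (a sketch assuming zstd is on PATH, not runqemu's exact code):

    import os, shutil, subprocess

    def decompress_zst(path):
        # Decompress a .zst file next to itself and return the new path.
        src = os.path.realpath(path)      # operate on the real file, not a symlink
        dst = src[:-len('.zst')]          # strip the .zst suffix
        if os.path.exists(dst):
            return dst                    # reuse an existing build artifact
        if not shutil.which('zstd'):
            raise RuntimeError("'zstd' not found in PATH")
        subprocess.check_call(['zstd', '-d', src, '-o', dst])
        return dst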
| @@ -468,9 +511,11 @@ class BaseConfig(object): | |||
| 468 | self.set("IMAGE_LINK_NAME", image_link_name) | 511 | self.set("IMAGE_LINK_NAME", image_link_name) |
| 469 | logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name) | 512 | logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name) |
| 470 | 513 | ||
| 471 | def set_dri_path(self): | 514 | def set_mesa_paths(self): |
| 472 | drivers_path = os.path.join(self.bindir_native, '../lib/dri') | 515 | drivers_path = os.path.join(self.bindir_native, '../lib/dri') |
| 473 | if not os.path.exists(drivers_path) or not os.listdir(drivers_path): | 516 | gbm_path = os.path.join(self.bindir_native, '../lib/gbm') |
| 517 | if not os.path.exists(drivers_path) or not os.listdir(drivers_path) \ | ||
| 518 | or not os.path.exists(gbm_path) or not os.listdir(gbm_path): | ||
| 474 | raise RunQemuError(""" | 519 | raise RunQemuError(""" |
| 475 | qemu has been built without opengl support and accelerated graphics support is not available. | 520 | qemu has been built without opengl support and accelerated graphics support is not available. |
| 476 | To enable it, add: | 521 | To enable it, add: |
| @@ -479,6 +524,7 @@ DISTRO_FEATURES_NATIVESDK:append = " opengl" | |||
| 479 | to your build configuration. | 524 | to your build configuration. |
| 480 | """) | 525 | """) |
| 481 | self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path | 526 | self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path |
| 527 | self.qemu_environ['GBM_BACKENDS_PATH'] = gbm_path | ||
| 482 | 528 | ||
| 483 | def check_args(self): | 529 | def check_args(self): |
| 484 | for debug in ("-d", "--debug"): | 530 | for debug in ("-d", "--debug"): |
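set_dri_path() becomes set_mesa_paths() because accelerated graphics now needs both the DRI drivers and the GBM backends directory from the native sysroot, each present and non-empty. A condensed sketch of the check (bindir_native is a placeholder value, and os.environ stands in for the script's qemu_environ dict):

    import os

    def nonempty_dir(path):
        return os.path.exists(path) and bool(os.listdir(path))

    bindir_native = '/path/to/recipe-sysroot-native/usr/bin'  # placeholder
    dri = os.path.join(bindir_native, '../lib/dri')
    gbm = os.path.join(bindir_native, '../lib/gbm')
    if nonempty_dir(dri) and nonempty_dir(gbm):
        os.environ['LIBGL_DRIVERS_PATH'] = dri
        os.environ['GBM_BACKENDS_PATH'] = gbm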
| @@ -961,34 +1007,12 @@ to your build configuration. | |||
| 961 | if not self.bitbake_e: | 1007 | if not self.bitbake_e: |
| 962 | self.load_bitbake_env() | 1008 | self.load_bitbake_env() |
| 963 | 1009 | ||
| 964 | if self.bitbake_e: | 1010 | native_vars = ['STAGING_DIR_NATIVE'] |
| 965 | native_vars = ['STAGING_DIR_NATIVE'] | 1011 | for nv in native_vars: |
| 966 | for nv in native_vars: | 1012 | s = re.search('^%s="(.*)"' % nv, self.bitbake_e, re.M) |
| 967 | s = re.search('^%s="(.*)"' % nv, self.bitbake_e, re.M) | 1013 | if s and s.group(1) != self.get(nv): |
| 968 | if s and s.group(1) != self.get(nv): | 1014 | logger.info('Overriding conf file setting of %s to %s from Bitbake environment' % (nv, s.group(1))) |
| 969 | logger.info('Overriding conf file setting of %s to %s from Bitbake environment' % (nv, s.group(1))) | 1015 | self.set(nv, s.group(1)) |
| 970 | self.set(nv, s.group(1)) | ||
| 971 | else: | ||
| 972 | # when we're invoked from a running bitbake instance we won't | ||
| 973 | # be able to call `bitbake -e`, then try: | ||
| 974 | # - get OE_TMPDIR from environment and guess paths based on it | ||
| 975 | # - get OECORE_NATIVE_SYSROOT from environment (for sdk) | ||
| 976 | tmpdir = self.get('OE_TMPDIR') | ||
| 977 | oecore_native_sysroot = self.get('OECORE_NATIVE_SYSROOT') | ||
| 978 | if tmpdir: | ||
| 979 | logger.info('Setting STAGING_DIR_NATIVE and STAGING_BINDIR_NATIVE relative to OE_TMPDIR (%s)' % tmpdir) | ||
| 980 | hostos, _, _, _, machine = os.uname() | ||
| 981 | buildsys = '%s-%s' % (machine, hostos.lower()) | ||
| 982 | staging_dir_native = '%s/sysroots/%s' % (tmpdir, buildsys) | ||
| 983 | self.set('STAGING_DIR_NATIVE', staging_dir_native) | ||
| 984 | elif oecore_native_sysroot: | ||
| 985 | logger.info('Setting STAGING_DIR_NATIVE to OECORE_NATIVE_SYSROOT (%s)' % oecore_native_sysroot) | ||
| 986 | self.set('STAGING_DIR_NATIVE', oecore_native_sysroot) | ||
| 987 | if self.get('STAGING_DIR_NATIVE'): | ||
| 988 | # we have to assume that STAGING_BINDIR_NATIVE is at usr/bin | ||
| 989 | staging_bindir_native = '%s/usr/bin' % self.get('STAGING_DIR_NATIVE') | ||
| 990 | logger.info('Setting STAGING_BINDIR_NATIVE to %s' % staging_bindir_native) | ||
| 991 | self.set('STAGING_BINDIR_NATIVE', '%s/usr/bin' % self.get('STAGING_DIR_NATIVE')) | ||
| 992 | 1016 | ||
| 993 | def print_config(self): | 1017 | def print_config(self): |
| 994 | logoutput = ['Continuing with the following parameters:'] | 1018 | logoutput = ['Continuing with the following parameters:'] |
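With the OE_TMPDIR/OECORE_NATIVE_SYSROOT fallback removed, runqemu depends entirely on parsing 'bitbake -e' output, where final variable values appear as VAR="value" lines. A minimal sketch of that extraction:

    import re

    def get_bb_assignment(bitbake_e_output, var):
        # 'bitbake -e' prints final values as lines of the form: VAR="value"
        m = re.search(r'^%s="(.*)"' % re.escape(var), bitbake_e_output, re.M)
        return m.group(1) if m else None

    sample = 'FOO="bar"\nSTAGING_DIR_NATIVE="/build/tmp/sysroots-native"\n'
    assert get_bb_assignment(sample, 'STAGING_DIR_NATIVE') == '/build/tmp/sysroots-native'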
| @@ -1008,6 +1032,9 @@ to your build configuration. | |||
| 1008 | logoutput.append('NFS_DIR: [%s]' % self.rootfs) | 1032 | logoutput.append('NFS_DIR: [%s]' % self.rootfs) |
| 1009 | else: | 1033 | else: |
| 1010 | logoutput.append('ROOTFS: [%s]' % self.rootfs) | 1034 | logoutput.append('ROOTFS: [%s]' % self.rootfs) |
| 1035 | logoutput.append('SNAPSHOT: [%s]' % | ||
| 1036 | "Enabled. Changes on rootfs won't be kept after QEMU shutdown." if self.snapshot | ||
| 1037 | else "Disabled. Changes on rootfs will be kept after QEMU shutdown.") | ||
| 1011 | if self.ovmf_bios: | 1038 | if self.ovmf_bios: |
| 1012 | logoutput.append('OVMF: %s' % self.ovmf_bios) | 1039 | logoutput.append('OVMF: %s' % self.ovmf_bios) |
| 1013 | if (self.ovmf_secboot_pkkek1): | 1040 | if (self.ovmf_secboot_pkkek1): |
| @@ -1192,19 +1219,22 @@ to your build configuration. | |||
| 1192 | raise RunQemuError("a new one with sudo.") | 1219 | raise RunQemuError("a new one with sudo.") |
| 1193 | 1220 | ||
| 1194 | gid = os.getgid() | 1221 | gid = os.getgid() |
| 1195 | uid = os.getuid() | ||
| 1196 | logger.info("Setting up tap interface under sudo") | 1222 | logger.info("Setting up tap interface under sudo") |
| 1197 | cmd = ('sudo', self.qemuifup, str(gid)) | 1223 | cmd = ('sudo', self.qemuifup, str(gid)) |
| 1198 | try: | 1224 | for _ in range(5): |
| 1199 | tap = subprocess.check_output(cmd).decode('utf-8').strip() | 1225 | try: |
| 1200 | except subprocess.CalledProcessError as e: | 1226 | tap = subprocess.check_output(cmd).decode('utf-8').strip() |
| 1201 | logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e)) | 1227 | except subprocess.CalledProcessError as e: |
| 1202 | sys.exit(1) | 1228 | logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e)) |
| 1203 | lockfile = os.path.join(lockdir, tap) | 1229 | sys.exit(1) |
| 1204 | self.taplock = lockfile + '.lock' | 1230 | lockfile = os.path.join(lockdir, tap) |
| 1205 | self.acquire_taplock() | 1231 | self.taplock = lockfile + '.lock' |
| 1206 | self.cleantap = True | 1232 | if self.acquire_taplock(): |
| 1207 | logger.debug('Created tap: %s' % tap) | 1233 | self.cleantap = True |
| 1234 | logger.debug('Created tap: %s' % tap) | ||
| 1235 | break | ||
| 1236 | else: | ||
| 1237 | tap = None | ||
| 1208 | 1238 | ||
| 1209 | if not tap: | 1239 | if not tap: |
| 1210 | logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") | 1240 | logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") |
| @@ -1295,6 +1325,10 @@ to your build configuration. | |||
| 1295 | elif drive_type.startswith("/dev/hd"): | 1325 | elif drive_type.startswith("/dev/hd"): |
| 1296 | logger.info('Using ide drive') | 1326 | logger.info('Using ide drive') |
| 1297 | vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format) | 1327 | vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format) |
| 1328 | elif drive_type.startswith("/dev/mmcblk"): | ||
| 1329 | logger.info('Using sdcard drive') | ||
| 1330 | vm_drive = '-drive id=sdcard0,if=none,file=%s,format=%s -device sdhci-pci -device sd-card,drive=sdcard0' \ | ||
| 1331 | % (self.rootfs, rootfs_format) | ||
| 1298 | elif drive_type.startswith("/dev/vdb"): | 1332 | elif drive_type.startswith("/dev/vdb"): |
| 1299 | logger.info('Using block virtio drive'); | 1333 | logger.info('Using block virtio drive'); |
| 1300 | vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \ | 1334 | vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \ |
| @@ -1454,7 +1488,7 @@ to your build configuration. | |||
| 1454 | self.qemu_opt += ' -display ' | 1488 | self.qemu_opt += ' -display ' |
| 1455 | if self.egl_headless == True: | 1489 | if self.egl_headless == True: |
| 1456 | self.check_render_nodes() | 1490 | self.check_render_nodes() |
| 1457 | self.set_dri_path() | 1491 | self.set_mesa_paths() |
| 1458 | self.qemu_opt += 'egl-headless,' | 1492 | self.qemu_opt += 'egl-headless,' |
| 1459 | else: | 1493 | else: |
| 1460 | if self.sdl == True: | 1494 | if self.sdl == True: |
| @@ -1464,10 +1498,10 @@ to your build configuration. | |||
| 1464 | self.qemu_opt += 'gtk,' | 1498 | self.qemu_opt += 'gtk,' |
| 1465 | 1499 | ||
| 1466 | if self.gl == True: | 1500 | if self.gl == True: |
| 1467 | self.set_dri_path() | 1501 | self.set_mesa_paths() |
| 1468 | self.qemu_opt += 'gl=on,' | 1502 | self.qemu_opt += 'gl=on,' |
| 1469 | elif self.gl_es == True: | 1503 | elif self.gl_es == True: |
| 1470 | self.set_dri_path() | 1504 | self.set_mesa_paths() |
| 1471 | self.qemu_opt += 'gl=es,' | 1505 | self.qemu_opt += 'gl=es,' |
| 1472 | self.qemu_opt += 'show-cursor=on' | 1506 | self.qemu_opt += 'show-cursor=on' |
| 1473 | 1507 | ||
| @@ -1483,7 +1517,7 @@ to your build configuration. | |||
| 1483 | # If no serial or serialtcp options were specified, only ttyS0 is created | 1517 | # If no serial or serialtcp options were specified, only ttyS0 is created |
| 1484 | # and sysvinit shows an error trying to enable ttyS1: | 1518 | # and sysvinit shows an error trying to enable ttyS1: |
| 1485 | # INIT: Id "S1" respawning too fast: disabled for 5 minutes | 1519 | # INIT: Id "S1" respawning too fast: disabled for 5 minutes |
| 1486 | serial_num = len(re.findall("-serial", self.qemu_opt)) | 1520 | serial_num = len(re.findall("(^| )-serial ", self.qemu_opt)) |
| 1487 | 1521 | ||
| 1488 | # Assume if the user passed serial options, they know what they want | 1522 | # Assume if the user passed serial options, they know what they want |
| 1489 | # and pad to two devices | 1523 | # and pad to two devices |
| @@ -1503,7 +1537,7 @@ to your build configuration. | |||
| 1503 | 1537 | ||
| 1504 | self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") | 1538 | self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") |
| 1505 | 1539 | ||
| 1506 | serial_num = len(re.findall("-serial", self.qemu_opt)) | 1540 | serial_num = len(re.findall("(^| )-serial ", self.qemu_opt)) |
| 1507 | if serial_num < 2: | 1541 | if serial_num < 2: |
| 1508 | self.qemu_opt += " -serial null" | 1542 | self.qemu_opt += " -serial null" |
| 1509 | 1543 | ||
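Anchoring the pattern as "(^| )-serial " counts only standalone -serial options; a bare substring search can over-count when the text appears inside another argument. For example:

    import re

    opts = '-drive file=rootfs-serial.ext4 -serial mon:stdio'
    assert len(re.findall('-serial', opts)) == 2        # substring over-counts
    assert len(re.findall('(^| )-serial ', opts)) == 1  # whole option only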
| @@ -1658,9 +1692,6 @@ to your build configuration. | |||
| 1658 | self.cleaned = True | 1692 | self.cleaned = True |
| 1659 | 1693 | ||
| 1660 | def run_bitbake_env(self, mach=None, target=''): | 1694 | def run_bitbake_env(self, mach=None, target=''): |
| 1661 | bitbake = shutil.which('bitbake') | ||
| 1662 | if not bitbake: | ||
| 1663 | return | ||
| 1664 | 1695 | ||
| 1665 | if not mach: | 1696 | if not mach: |
| 1666 | mach = self.get('MACHINE') | 1697 | mach = self.get('MACHINE') |
| @@ -1669,11 +1700,18 @@ to your build configuration. | |||
| 1669 | if multiconfig: | 1700 | if multiconfig: |
| 1670 | multiconfig = "mc:%s" % multiconfig | 1701 | multiconfig = "mc:%s" % multiconfig |
| 1671 | 1702 | ||
| 1703 | if self.rootfs and not target: | ||
| 1704 | target = self.rootfs | ||
| 1705 | |||
| 1672 | if mach: | 1706 | if mach: |
| 1673 | cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) | 1707 | cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) |
| 1674 | else: | 1708 | else: |
| 1675 | cmd = 'bitbake -e %s %s' % (multiconfig, target) | 1709 | cmd = 'bitbake -e %s %s' % (multiconfig, target) |
| 1676 | 1710 | ||
| 1711 | bitbake = shutil.which('bitbake') | ||
| 1712 | if not bitbake: | ||
| 1713 | raise OEPathError("Bitbake is needed to run '%s', but it is not found in PATH. Please source the bitbake build environment." % cmd.strip()) | ||
| 1714 | |||
| 1677 | logger.info('Running %s...' % cmd) | 1715 | logger.info('Running %s...' % cmd) |
| 1678 | try: | 1716 | try: |
| 1679 | return subprocess.check_output(cmd, shell=True).decode('utf-8') | 1717 | return subprocess.check_output(cmd, shell=True).decode('utf-8') |
| @@ -1685,11 +1723,7 @@ to your build configuration. | |||
| 1685 | cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) | 1723 | cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) |
| 1686 | else: | 1724 | else: |
| 1687 | cmd = 'bitbake -e %s %s' % (multiconfig, target) | 1725 | cmd = 'bitbake -e %s %s' % (multiconfig, target) |
| 1688 | try: | 1726 | return subprocess.check_output(cmd, shell=True).decode('utf-8') |
| 1689 | return subprocess.check_output(cmd, shell=True).decode('utf-8') | ||
| 1690 | except subprocess.CalledProcessError as err: | ||
| 1691 | logger.warning("Couldn't run '%s' to gather environment information, giving up with 'bitbake -e':\n%s" % (cmd, err.output.decode('utf-8'))) | ||
| 1692 | return '' | ||
| 1693 | 1727 | ||
| 1694 | 1728 | ||
| 1695 | def load_bitbake_env(self, mach=None, target=None): | 1729 | def load_bitbake_env(self, mach=None, target=None): |
| @@ -1749,6 +1783,7 @@ def main(): | |||
| 1749 | config.check_args() | 1783 | config.check_args() |
| 1750 | config.read_qemuboot() | 1784 | config.read_qemuboot() |
| 1751 | config.check_and_set() | 1785 | config.check_and_set() |
| 1786 | config.uncompress_rootfs() | ||
| 1752 | # Check whether the combos is valid or not | 1787 | # Check whether the combos is valid or not |
| 1753 | config.validate_combos() | 1788 | config.validate_combos() |
| 1754 | config.print_config() | 1789 | config.print_config() |
diff --git a/scripts/send-error-report b/scripts/send-error-report index cfbcaa52cb..cc1bc7c2b1 100755 --- a/scripts/send-error-report +++ b/scripts/send-error-report | |||
| @@ -6,6 +6,7 @@ | |||
| 6 | # Copyright (C) 2013 Intel Corporation | 6 | # Copyright (C) 2013 Intel Corporation |
| 7 | # Author: Andreea Proca <andreea.b.proca@intel.com> | 7 | # Author: Andreea Proca <andreea.b.proca@intel.com> |
| 8 | # Author: Michael Wood <michael.g.wood@intel.com> | 8 | # Author: Michael Wood <michael.g.wood@intel.com> |
| 9 | # Author: Thomas Perrot <thomas.perrot@bootlin.com> | ||
| 9 | # | 10 | # |
| 10 | # SPDX-License-Identifier: GPL-2.0-only | 11 | # SPDX-License-Identifier: GPL-2.0-only |
| 11 | # | 12 | # |
| @@ -22,7 +23,7 @@ scripts_lib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'li | |||
| 22 | sys.path.insert(0, scripts_lib_path) | 23 | sys.path.insert(0, scripts_lib_path) |
| 23 | import argparse_oe | 24 | import argparse_oe |
| 24 | 25 | ||
| 25 | version = "0.3" | 26 | version = "0.4" |
| 26 | 27 | ||
| 27 | log = logging.getLogger("send-error-report") | 28 | log = logging.getLogger("send-error-report") |
| 28 | logging.basicConfig(format='%(levelname)s: %(message)s') | 29 | logging.basicConfig(format='%(levelname)s: %(message)s') |
| @@ -65,7 +66,7 @@ def edit_content(json_file_path): | |||
| 65 | 66 | ||
| 66 | def prepare_data(args): | 67 | def prepare_data(args): |
| 67 | # attempt to get the max_log_size from the server's settings | 68 | # attempt to get the max_log_size from the server's settings |
| 68 | max_log_size = getPayloadLimit(args.protocol+args.server+"/ClientPost/JSON") | 69 | max_log_size = getPayloadLimit(args.server+"/ClientPost/JSON") |
| 69 | 70 | ||
| 70 | if not os.path.isfile(args.error_file): | 71 | if not os.path.isfile(args.error_file): |
| 71 | log.error("No data file found.") | 72 | log.error("No data file found.") |
| @@ -135,19 +136,38 @@ def send_data(data, args): | |||
| 135 | headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version} | 136 | headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version} |
| 136 | 137 | ||
| 137 | if args.json: | 138 | if args.json: |
| 138 | url = args.protocol+args.server+"/ClientPost/JSON/" | 139 | url = args.server+"/ClientPost/JSON/" |
| 139 | else: | 140 | else: |
| 140 | url = args.protocol+args.server+"/ClientPost/" | 141 | url = args.server+"/ClientPost/" |
| 141 | 142 | ||
| 142 | req = urllib.request.Request(url, data=data, headers=headers) | 143 | req = urllib.request.Request(url, data=data, headers=headers) |
| 144 | |||
| 145 | log.debug(f"Request URL: {url}") | ||
| 146 | log.debug(f"Request Headers: {headers}") | ||
| 147 | log.debug(f"Request Data: {data.decode('utf-8')}") | ||
| 148 | |||
| 143 | try: | 149 | try: |
| 144 | response = urllib.request.urlopen(req) | 150 | response = urllib.request.urlopen(req) |
| 145 | except urllib.error.HTTPError as e: | 151 | except urllib.error.HTTPError as e: |
| 146 | logging.error(str(e)) | 152 | log.error(f"HTTP Error {e.code}: {e.reason}") |
| 153 | log.debug(f"Response Content: {e.read().decode('utf-8')}") | ||
| 147 | sys.exit(1) | 154 | sys.exit(1) |
| 148 | 155 | ||
| 156 | log.debug(f"Response Status: {response.status}") | ||
| 157 | log.debug(f"Response Headers: {response.getheaders()}") | ||
| 149 | print(response.read().decode('utf-8')) | 158 | print(response.read().decode('utf-8')) |
| 150 | 159 | ||
| 160 | def validate_server_url(args): | ||
| 161 | # Get the error report server from an argument | ||
| 162 | server = args.server or 'https://errors.yoctoproject.org' | ||
| 163 | |||
| 164 | if not server.startswith('http://') and not server.startswith('https://'): | ||
| 165 | log.error("Missing a URL scheme either http:// or https:// in the server name: " + server) | ||
| 166 | sys.exit(1) | ||
| 167 | |||
| 168 | # Return the validated server URL | ||
| 169 | return server | ||
| 170 | |||
| 151 | 171 | ||
| 152 | if __name__ == '__main__': | 172 | if __name__ == '__main__': |
| 153 | arg_parse = argparse_oe.ArgumentParser(description="This script will send an error report to your specified error-report-web server.") | 173 | arg_parse = argparse_oe.ArgumentParser(description="This script will send an error report to your specified error-report-web server.") |
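Dropping --no-ssl/args.protocol means the scheme now travels inside --server itself, with https://errors.yoctoproject.org as the default. The same validation can be expressed with urllib.parse (a variant sketch, not the script's literal startswith() check):

    from urllib.parse import urlparse

    def validate_server_url(server, default='https://errors.yoctoproject.org'):
        server = server or default
        if urlparse(server).scheme not in ('http', 'https'):
            raise ValueError('Missing http:// or https:// scheme in: ' + server)
        return server

    assert validate_server_url(None) == 'https://errors.yoctoproject.org'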
| @@ -164,8 +184,7 @@ if __name__ == '__main__': | |||
| 164 | arg_parse.add_argument("-s", | 184 | arg_parse.add_argument("-s", |
| 165 | "--server", | 185 | "--server", |
| 166 | help="Server to send error report to", | 186 | help="Server to send error report to", |
| 167 | type=str, | 187 | type=str) |
| 168 | default="errors.yoctoproject.org") | ||
| 169 | 188 | ||
| 170 | arg_parse.add_argument("-e", | 189 | arg_parse.add_argument("-e", |
| 171 | "--email", | 190 | "--email", |
| @@ -190,18 +209,22 @@ if __name__ == '__main__': | |||
| 190 | help="Return the result in json format, silences all other output", | 209 | help="Return the result in json format, silences all other output", |
| 191 | action="store_true") | 210 | action="store_true") |
| 192 | 211 | ||
| 193 | arg_parse.add_argument("--no-ssl", | 212 | arg_parse.add_argument("-d", |
| 194 | help="Use http instead of https protocol", | 213 | "--debug", |
| 195 | dest="protocol", | 214 | help="Enable debug mode to print request/response details", |
| 196 | action="store_const", const="http://", default="https://") | 215 | action="store_true") |
| 197 | |||
| 198 | |||
| 199 | 216 | ||
| 200 | args = arg_parse.parse_args() | 217 | args = arg_parse.parse_args() |
| 201 | 218 | ||
| 219 | args.server = validate_server_url(args) | ||
| 220 | |||
| 202 | if (args.json == False): | 221 | if (args.json == False): |
| 203 | print("Preparing to send errors to: "+args.server) | 222 | print("Preparing to send errors to: "+args.server) |
| 204 | 223 | ||
| 224 | # Enable debugging if requested | ||
| 225 | if args.debug: | ||
| 226 | log.setLevel(logging.DEBUG) | ||
| 227 | |||
| 205 | data = prepare_data(args) | 228 | data = prepare_data(args) |
| 206 | send_data(data, args) | 229 | send_data(data, args) |
| 207 | 230 | ||
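The new -d/--debug switch works purely through the logging level: the log.debug() calls added around the request and response are silent at the default WARNING level until the flag raises the logger to DEBUG. A minimal reproduction of the mechanism:

    import argparse, logging

    log = logging.getLogger('send-error-report')
    logging.basicConfig(format='%(levelname)s: %(message)s')

    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--debug', action='store_true')
    args = parser.parse_args(['-d'])
    if args.debug:
        log.setLevel(logging.DEBUG)
    log.debug('visible only when -d was given')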
diff --git a/scripts/sstate-cache-management.py b/scripts/sstate-cache-management.py index d3f600bd28..303b8f13a3 100755 --- a/scripts/sstate-cache-management.py +++ b/scripts/sstate-cache-management.py | |||
| @@ -268,6 +268,10 @@ def parse_arguments(): | |||
| 268 | # ) | 268 | # ) |
| 269 | 269 | ||
| 270 | parser.add_argument( | 270 | parser.add_argument( |
| 271 | "-n", "--dry-run", action="store_true", help="Don't execute, just go through the motions." | ||
| 272 | ) | ||
| 273 | |||
| 274 | parser.add_argument( | ||
| 271 | "-y", | 275 | "-y", |
| 272 | "--yes", | 276 | "--yes", |
| 273 | action="store_true", | 277 | action="store_true", |
| @@ -314,6 +318,9 @@ def main(): | |||
| 314 | if args.debug >= 1: | 318 | if args.debug >= 1: |
| 315 | print("\n".join([str(p.path) for p in remove])) | 319 | print("\n".join([str(p.path) for p in remove])) |
| 316 | print(f"{len(remove)} out of {len(paths)} files will be removed!") | 320 | print(f"{len(remove)} out of {len(paths)} files will be removed!") |
| 321 | if args.dry_run: | ||
| 322 | return 0 | ||
| 323 | |||
| 317 | if not args.yes: | 324 | if not args.yes: |
| 318 | print("Do you want to continue (y/n)?") | 325 | print("Do you want to continue (y/n)?") |
| 319 | confirm = input() in ("y", "Y") | 326 | confirm = input() in ("y", "Y") |
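--dry-run returns right after the removal summary, before the confirmation prompt, so combined with --debug it lists the removal candidates without touching anything. A condensed sketch of the flow with stand-in names (entries are assumed to expose a pathlib-style .path, as the surrounding code suggests):

    def maybe_remove(remove, paths, dry_run, yes):
        print(f"{len(remove)} out of {len(paths)} files will be removed!")
        if dry_run:
            return 0                  # report only, delete nothing
        if not yes and input("Do you want to continue (y/n)? ") not in ("y", "Y"):
            return 1
        for entry in remove:
            entry.path.unlink()       # assumed pathlib-style entries
        return 0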
diff --git a/scripts/test-remote-image b/scripts/test-remote-image index d209d22854..1d018992b0 100755 --- a/scripts/test-remote-image +++ b/scripts/test-remote-image | |||
| @@ -152,8 +152,7 @@ class AutoTargetProfile(BaseTargetProfile): | |||
| 152 | return controller | 152 | return controller |
| 153 | 153 | ||
| 154 | def set_kernel_file(self): | 154 | def set_kernel_file(self): |
| 155 | postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" | 155 | machine = get_bb_var('MACHINE') |
| 156 | machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig) | ||
| 157 | self.kernel_file = self.kernel_type + '-' + machine + '.bin' | 156 | self.kernel_file = self.kernel_type + '-' + machine + '.bin' |
| 158 | 157 | ||
| 159 | def set_rootfs_file(self): | 158 | def set_rootfs_file(self): |
| @@ -215,13 +214,11 @@ class PublicAB(BaseRepoProfile): | |||
| 215 | def get_repo_path(self): | 214 | def get_repo_path(self): |
| 216 | path = '/machines/' | 215 | path = '/machines/' |
| 217 | 216 | ||
| 218 | postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" | 217 | machine = get_bb_var('MACHINE') |
| 219 | machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig) | ||
| 220 | if 'qemu' in machine: | 218 | if 'qemu' in machine: |
| 221 | path += 'qemu/' | 219 | path += 'qemu/' |
| 222 | 220 | ||
| 223 | postconfig = "QA_GET_DISTRO = \"${DISTRO}\"" | 221 | distro = get_bb_var('DISTRO') |
| 224 | distro = get_bb_var('QA_GET_DISTRO', postconfig=postconfig) | ||
| 225 | path += distro.replace('poky', machine) + '/' | 222 | path += distro.replace('poky', machine) + '/' |
| 226 | return path | 223 | return path |
| 227 | 224 | ||
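Both hunks drop the QA_GET_* postconfig indirection and read the variables directly. Assuming the in-tree helper from meta/lib/oeqa/utils/commands.py, the simplified calls amount to:

    from oeqa.utils.commands import get_bb_var

    machine = get_bb_var('MACHINE')
    distro = get_bb_var('DISTRO')
    kernel_file = 'bzImage-%s.bin' % machine
    repo_path = '/machines/' + ('qemu/' if 'qemu' in machine else '') \
                + distro.replace('poky', machine) + '/'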
diff --git a/scripts/wic b/scripts/wic index 06e0b48db0..9137208f5e 100755 --- a/scripts/wic +++ b/scripts/wic | |||
| @@ -237,6 +237,13 @@ def wic_ls_subcommand(args, usage_str): | |||
| 237 | Command-line handling for list content of images. | 237 | Command-line handling for list content of images. |
| 238 | The real work is done by engine.wic_ls() | 238 | The real work is done by engine.wic_ls() |
| 239 | """ | 239 | """ |
| 240 | |||
| 241 | if args.image_name: | ||
| 242 | BB_VARS.default_image = args.image_name | ||
| 243 | |||
| 244 | if args.vars_dir: | ||
| 245 | BB_VARS.vars_dir = args.vars_dir | ||
| 246 | |||
| 240 | engine.wic_ls(args, args.native_sysroot) | 247 | engine.wic_ls(args, args.native_sysroot) |
| 241 | 248 | ||
| 242 | def wic_cp_subcommand(args, usage_str): | 249 | def wic_cp_subcommand(args, usage_str): |
| @@ -244,6 +251,12 @@ def wic_cp_subcommand(args, usage_str): | |||
| 244 | Command-line handling for copying files/dirs to images. | 251 | Command-line handling for copying files/dirs to images. |
| 245 | The real work is done by engine.wic_cp() | 252 | The real work is done by engine.wic_cp() |
| 246 | """ | 253 | """ |
| 254 | if args.image_name: | ||
| 255 | BB_VARS.default_image = args.image_name | ||
| 256 | |||
| 257 | if args.vars_dir: | ||
| 258 | BB_VARS.vars_dir = args.vars_dir | ||
| 259 | |||
| 247 | engine.wic_cp(args, args.native_sysroot) | 260 | engine.wic_cp(args, args.native_sysroot) |
| 248 | 261 | ||
| 249 | def wic_rm_subcommand(args, usage_str): | 262 | def wic_rm_subcommand(args, usage_str): |
| @@ -251,6 +264,12 @@ def wic_rm_subcommand(args, usage_str): | |||
| 251 | Command-line handling for removing files/dirs from images. | 264 | Command-line handling for removing files/dirs from images. |
| 252 | The real work is done by engine.wic_rm() | 265 | The real work is done by engine.wic_rm() |
| 253 | """ | 266 | """ |
| 267 | if args.image_name: | ||
| 268 | BB_VARS.default_image = args.image_name | ||
| 269 | |||
| 270 | if args.vars_dir: | ||
| 271 | BB_VARS.vars_dir = args.vars_dir | ||
| 272 | |||
| 254 | engine.wic_rm(args, args.native_sysroot) | 273 | engine.wic_rm(args, args.native_sysroot) |
| 255 | 274 | ||
| 256 | def wic_write_subcommand(args, usage_str): | 275 | def wic_write_subcommand(args, usage_str): |
| @@ -258,6 +277,12 @@ def wic_write_subcommand(args, usage_str): | |||
| 258 | Command-line handling for writing images. | 277 | Command-line handling for writing images. |
| 259 | The real work is done by engine.wic_write() | 278 | The real work is done by engine.wic_write() |
| 260 | """ | 279 | """ |
| 280 | if args.image_name: | ||
| 281 | BB_VARS.default_image = args.image_name | ||
| 282 | |||
| 283 | if args.vars_dir: | ||
| 284 | BB_VARS.vars_dir = args.vars_dir | ||
| 285 | |||
| 261 | engine.wic_write(args, args.native_sysroot) | 286 | engine.wic_write(args, args.native_sysroot) |
| 262 | 287 | ||
| 263 | def wic_help_subcommand(args, usage_str): | 288 | def wic_help_subcommand(args, usage_str): |
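Each wic subcommand handler now seeds the shared BB_VARS object from the new -e/--image-name and -v/--vars options before handing off to the engine, so variable lookups resolve against the right image. The pattern, with BBVars sketched as a stand-in for the singleton wic imports:

    class BBVars:
        def __init__(self):
            self.default_image = None
            self.vars_dir = None

    BB_VARS = BBVars()  # stand-in for the shared instance wic imports

    def seed_bb_vars(args):
        # Called at the top of each subcommand handler, before engine work.
        if getattr(args, 'image_name', None):
            BB_VARS.default_image = args.image_name
        if getattr(args, 'vars_dir', None):
            BB_VARS.vars_dir = args.vars_dir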
| @@ -390,6 +415,12 @@ def wic_init_parser_ls(subparser): | |||
| 390 | help="image spec: <image>[:<vfat partition>[<path>]]") | 415 | help="image spec: <image>[:<vfat partition>[<path>]]") |
| 391 | subparser.add_argument("-n", "--native-sysroot", | 416 | subparser.add_argument("-n", "--native-sysroot", |
| 392 | help="path to the native sysroot containing the tools") | 417 | help="path to the native sysroot containing the tools") |
| 418 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
| 419 | help="name of the image to use the artifacts from " | ||
| 420 | "e.g. core-image-sato") | ||
| 421 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
| 422 | help="directory with <image>.env files that store " | ||
| 423 | "bitbake variables") | ||
| 393 | 424 | ||
| 394 | def imgpathtype(arg): | 425 | def imgpathtype(arg): |
| 395 | img = imgtype(arg) | 426 | img = imgtype(arg) |
| @@ -404,6 +435,12 @@ def wic_init_parser_cp(subparser): | |||
| 404 | help="image spec: <image>:<vfat partition>[<path>] or <file>") | 435 | help="image spec: <image>:<vfat partition>[<path>] or <file>") |
| 405 | subparser.add_argument("-n", "--native-sysroot", | 436 | subparser.add_argument("-n", "--native-sysroot", |
| 406 | help="path to the native sysroot containing the tools") | 437 | help="path to the native sysroot containing the tools") |
| 438 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
| 439 | help="name of the image to use the artifacts from " | ||
| 440 | "e.g. core-image-sato") | ||
| 441 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
| 442 | help="directory with <image>.env files that store " | ||
| 443 | "bitbake variables") | ||
| 407 | 444 | ||
| 408 | def wic_init_parser_rm(subparser): | 445 | def wic_init_parser_rm(subparser): |
| 409 | subparser.add_argument("path", type=imgpathtype, | 446 | subparser.add_argument("path", type=imgpathtype, |
| @@ -413,6 +450,12 @@ def wic_init_parser_rm(subparser): | |||
| 413 | subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, | 450 | subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, |
| 414 | help="remove directories and their contents recursively, " | 451 | help="remove directories and their contents recursively, " |
| 415 | " this only applies to ext* partition") | 452 | " this only applies to ext* partition") |
| 453 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
| 454 | help="name of the image to use the artifacts from " | ||
| 455 | "e.g. core-image-sato") | ||
| 456 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
| 457 | help="directory with <image>.env files that store " | ||
| 458 | "bitbake variables") | ||
| 416 | 459 | ||
| 417 | def expandtype(rules): | 460 | def expandtype(rules): |
| 418 | """ | 461 | """ |
| @@ -454,6 +497,12 @@ def wic_init_parser_write(subparser): | |||
| 454 | help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") | 497 | help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") |
| 455 | subparser.add_argument("-n", "--native-sysroot", | 498 | subparser.add_argument("-n", "--native-sysroot", |
| 456 | help="path to the native sysroot containing the tools") | 499 | help="path to the native sysroot containing the tools") |
| 500 | subparser.add_argument("--image-name", dest="image_name", | ||
| 501 | help="name of the image to use the artifacts from " | ||
| 502 | "e.g. core-image-sato") | ||
| 503 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
| 504 | help="directory with <image>.env files that store " | ||
| 505 | "bitbake variables") | ||
| 457 | 506 | ||
| 458 | def wic_init_parser_help(subparser): | 507 | def wic_init_parser_help(subparser): |
| 459 | helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) | 508 | helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) |
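The same -e/--image-name and -v/--vars pair is added verbatim to the ls, cp, rm and write parsers (write takes only the long --image-name form, likely because -e is already in use there). A small hypothetical helper would keep the four definitions in one place:

    def add_image_vars_args(subparser, image_flags=("-e", "--image-name")):
        # Hypothetical refactor sketch; the diff repeats these calls per parser.
        subparser.add_argument(*image_flags, dest="image_name",
                               help="name of the image to use the artifacts from "
                                    "e.g. core-image-sato")
        subparser.add_argument("-v", "--vars", dest="vars_dir",
                               help="directory with <image>.env files that store "
                                    "bitbake variables")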
diff --git a/scripts/yocto-check-layer b/scripts/yocto-check-layer index 67cc71950f..8c8b4dbcad 100755 --- a/scripts/yocto-check-layer +++ b/scripts/yocto-check-layer | |||
| @@ -35,6 +35,7 @@ logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) | |||
| 35 | def test_layer(td, layer, test_software_layer_signatures): | 35 | def test_layer(td, layer, test_software_layer_signatures): |
| 36 | from checklayer.context import CheckLayerTestContext | 36 | from checklayer.context import CheckLayerTestContext |
| 37 | logger.info("Starting to analyze: %s" % layer['name']) | 37 | logger.info("Starting to analyze: %s" % layer['name']) |
| 38 | logger.info('Distro: %s' % td['bbvars']['DISTRO']) | ||
| 38 | logger.info("----------------------------------------------------------------------") | 39 | logger.info("----------------------------------------------------------------------") |
| 39 | 40 | ||
| 40 | tc = CheckLayerTestContext(td=td, logger=logger, layer=layer, test_software_layer_signatures=test_software_layer_signatures) | 41 | tc = CheckLayerTestContext(td=td, logger=logger, layer=layer, test_software_layer_signatures=test_software_layer_signatures) |
