Diffstat (limited to 'scripts')
84 files changed, 2765 insertions, 1944 deletions
diff --git a/scripts/b4-wrapper-poky.py b/scripts/b4-wrapper-poky.py new file mode 100755 index 0000000000..f1170db06b --- /dev/null +++ b/scripts/b4-wrapper-poky.py | |||
@@ -0,0 +1,185 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: MIT | ||
6 | # | ||
7 | # This script is to be called by b4: | ||
8 | # - through the b4.prep-perpatch-check-cmd with "prep-perpatch-check-cmd" as | ||
9 | # first argument, | ||
10 | # - through b4.send-auto-cc-cmd with "send-auto-cc-cmd" as first argument, | ||
11 | # - through b4.send-auto-to-cmd with "send-auto-to-cmd" as first argument, | ||
12 | # | ||
13 | # When prep-perpatch-check-cmd is passed: | ||
14 | # | ||
15 | # This checks that a patch makes changes to at most one project in the poky | ||
16 | # combo repo (that is, out of yocto-docs, bitbake, openembedded-core combined | ||
17 | # into poky and the poky-specific files). | ||
18 | # | ||
19 | # Printing something to stdout from this script makes b4 prep --check fail | ||
20 | # for the currently parsed patch. | ||
21 | # | ||
22 | # It also checks that, across the whole series, the patches together touch at most one project. | ||
23 | # | ||
24 | # When send-auto-cc-cmd is passed: | ||
25 | # | ||
26 | # This returns the list of Cc recipients for a patch. | ||
27 | # | ||
28 | # When send-auto-to-cmd is passed: | ||
29 | # | ||
30 | # This returns the list of To recipients for a patch. | ||
31 | # | ||
32 | # This script takes as stdin a patch. | ||
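#
# Illustrative only: a .b4-config wiring these hooks up would use the b4.*
# options named above, roughly as follows (the real poky configuration may
# differ):
#
#   [b4]
#     prep-perpatch-check-cmd = scripts/b4-wrapper-poky.py prep-perpatch-check-cmd
#     send-auto-to-cmd = scripts/b4-wrapper-poky.py send-auto-to-cmd
#     send-auto-cc-cmd = scripts/b4-wrapper-poky.py send-auto-cc-cmd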
33 | |||
34 | import pathlib | ||
35 | import re | ||
36 | import shutil | ||
37 | import subprocess | ||
38 | import sys | ||
39 | |||
40 | cmd = sys.argv[1] | ||
41 | |||
42 | patch = sys.stdin.readlines() | ||
43 | |||
44 | # The Subject field is used to identify the last patch, as this script is called | ||
45 | # once per patch. A shared temporary file, named after the unique identifier in | ||
46 | # the References field, records which projects are modified by earlier patches in | ||
47 | # the series. To avoid cluttering the disk, the last patch in the series removes | ||
48 | # that shared file. | ||
49 | re_subject = re.compile(r'^Subject:.*\[.*PATCH.*\s(\d+)/\1') | ||
50 | re_ref = re.compile(r'^References: <(.*)>$') | ||
51 | |||
52 | subject = None | ||
53 | ref = None | ||
54 | |||
55 | if not shutil.which("lsdiff"): | ||
56 | print("lsdiff missing from host, please install patchutils", file=sys.stderr) | ||
57 | sys.exit(-1) | ||
58 | |||
59 | try: | ||
60 | one_patch_series = False | ||
61 | for line in patch: | ||
62 | subject = re_subject.match(line) | ||
63 | if subject: | ||
64 | # Handle [PATCH 1/1] | ||
65 | if subject.group(1) == "1": | ||
66 | one_patch_series = True | ||
67 | break | ||
68 | if re.match(r'^Subject: .*\[.*PATCH[^/]*\]', line): | ||
69 | # A single patch is named [PATCH], but if there is a prefix it could be | ||
70 | # [PATCH prefix], so handle everything that doesn't contain a / | ||
71 | # character, which is used as the separator between the current patch | ||
72 | # number and the total patch count | ||
73 | one_patch_series = True | ||
74 | break | ||
75 | |||
76 | if cmd == "prep-perpatch-check-cmd" and not one_patch_series: | ||
77 | for line in patch: | ||
78 | ref = re_ref.match(line) | ||
79 | if ref: | ||
80 | break | ||
81 | |||
82 | if not ref: | ||
83 | print("Failed to find ref to cover letter (References:)...", file=sys.stderr) | ||
84 | sys.exit(-2) | ||
85 | |||
86 | ref = ref.group(1) | ||
87 | series_check = pathlib.Path(f".tmp-{ref}") | ||
88 | |||
89 | patch = "".join(patch) | ||
90 | |||
91 | if cmd == "send-auto-cc-cmd": | ||
92 | # Patches to BitBake documentation should also go to yocto-docs mailing list | ||
93 | project_paths = { | ||
94 | "yocto-docs": ["bitbake/doc/*"], | ||
95 | } | ||
96 | else: | ||
97 | project_paths = { | ||
98 | "bitbake": ["bitbake/*"], | ||
99 | "yocto-docs": ["documentation/*"], | ||
100 | "poky": [ | ||
101 | "meta-poky/*", | ||
102 | "meta-yocto-bsp/*", | ||
103 | "README.hardware.md", | ||
104 | "README.poky.md", | ||
105 | # scripts/b4-wrapper-poky.py is only run by b4 from within the | ||
106 | # poky git repo. Given that limitation, changes made to .b4-config | ||
107 | # can only concern poky's copy and not OE-Core's, as only poky's | ||
108 | # copy is stored in the poky git repo. | ||
109 | ".b4-config", | ||
110 | ], | ||
111 | } | ||
112 | |||
113 | # List of projects touched by this patch | ||
114 | projs = [] | ||
115 | |||
116 | # Any file not matched by a path in project_paths is assumed to come from | ||
117 | # OE-Core. | ||
118 | # Whenever a path in project_paths matches, remove the matched files from | ||
119 | # that set. | ||
120 | files_left = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"], | ||
121 | input=patch, text=True) | ||
122 | files_left = set(files_left.splitlines()) | ||
123 | |||
124 | for proj, proj_paths in project_paths.items(): | ||
125 | lsdiff_args = [f"--include={path}" for path in proj_paths] | ||
126 | files = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"] + lsdiff_args, | ||
127 | input=patch, text=True) | ||
128 | if len(files): | ||
129 | files_left = files_left - set(files.splitlines()) | ||
130 | projs.append(proj) | ||
131 | continue | ||
132 | |||
133 | # Handle patches made with --no-prefix | ||
134 | files = subprocess.check_output(["lsdiff"] + lsdiff_args, | ||
135 | input=patch, text=True) | ||
136 | if len(files): | ||
137 | files_left = files_left - set(files.splitlines()) | ||
138 | projs.append(proj) | ||
139 | |||
140 | # Catch-all for everything not poky-specific or in bitbake/yocto-docs | ||
141 | if len(files_left) and cmd != "send-auto-cc-cmd": | ||
142 | projs.append("openembedded-core") | ||
143 | |||
144 | if cmd == "prep-perpatch-check-cmd": | ||
145 | if len(projs) > 1: | ||
146 | print(f"Diff spans more than one project ({', '.join(sorted(projs))}), split into multiple commits...", | ||
147 | file=sys.stderr) | ||
148 | sys.exit(-3) | ||
149 | |||
150 | # No need to check other patches in the series as there aren't any | ||
151 | if one_patch_series: | ||
152 | sys.exit(0) | ||
153 | |||
154 | # This should be replaced once b4 supports prep-perseries-check-cmd (or something similar) | ||
155 | |||
156 | if series_check.exists(): | ||
157 | # NOT race-free if b4 decides to parallelize prep-perpatch-check-cmd | ||
158 | series_projs = series_check.read_text().split('\n') | ||
159 | else: | ||
160 | series_projs = [] | ||
161 | |||
162 | series_projs += projs | ||
163 | uniq_series_projs = set(series_projs) | ||
164 | # NOT race-free, if b4 decides to parallelize prep-perpatch-check-cmd | ||
165 | series_check.write_text('\n'.join(uniq_series_projs)) | ||
166 | |||
167 | if len(uniq_series_projs) > 1: | ||
168 | print(f"Series spans more than one project ({', '.join(sorted(uniq_series_projs))}), split into multiple series...", | ||
169 | file=sys.stderr) | ||
170 | sys.exit(-4) | ||
171 | else: # send-auto-cc-cmd / send-auto-to-cmd | ||
172 | ml_projs = { | ||
173 | "bitbake": "bitbake-devel@lists.openembedded.org", | ||
174 | "yocto-docs": "docs@lists.yoctoproject.org", | ||
175 | "poky": "poky@lists.yoctoproject.org", | ||
176 | "openembedded-core": "openembedded-core@lists.openembedded.org", | ||
177 | } | ||
178 | |||
179 | print("\n".join([ml_projs[ml] for ml in projs])) | ||
180 | |||
181 | sys.exit(0) | ||
182 | finally: | ||
183 | # Last patch in the series, cleanup tmp file | ||
184 | if subject and ref and series_check.exists(): | ||
185 | series_check.unlink() | ||
diff --git a/scripts/bitbake-prserv-tool b/scripts/bitbake-prserv-tool index 80028342b1..0559c4c38a 100755 --- a/scripts/bitbake-prserv-tool +++ b/scripts/bitbake-prserv-tool | |||
@@ -55,43 +55,6 @@ do_import () | |||
55 | return $ret | 55 | return $ret |
56 | } | 56 | } |
57 | 57 | ||
58 | do_migrate_localcount () | ||
59 | { | ||
60 | df=`bitbake -R conf/migrate_localcount.conf -e | \ | ||
61 | grep ^LOCALCOUNT_DUMPFILE= | cut -f2 -d\"` | ||
62 | if [ "x${df}" == "x" ]; | ||
63 | then | ||
64 | echo "LOCALCOUNT_DUMPFILE is not defined!" | ||
65 | return 1 | ||
66 | fi | ||
67 | |||
68 | rm -f $df | ||
69 | clean_cache | ||
70 | echo "Exporting LOCALCOUNT to AUTOINCs..." | ||
71 | bitbake -R conf/migrate_localcount.conf -p | ||
72 | [ ! $? -eq 0 ] && echo "Exporting to file $df failed!" && exit 1 | ||
73 | |||
74 | if [ -e $df ]; | ||
75 | then | ||
76 | echo "Exporting to file $df succeeded!" | ||
77 | else | ||
78 | echo "Exporting to file $df failed!" | ||
79 | exit 1 | ||
80 | fi | ||
81 | |||
82 | echo "Importing generated AUTOINC entries..." | ||
83 | [ -e $df ] && do_import $df | ||
84 | |||
85 | if [ ! $? -eq 0 ] | ||
86 | then | ||
87 | echo "Migration from LOCALCOUNT to AUTOINCs failed!" | ||
88 | return 1 | ||
89 | fi | ||
90 | |||
91 | echo "Migration from LOCALCOUNT to AUTOINCs succeeded!" | ||
92 | return 0 | ||
93 | } | ||
94 | |||
95 | [ $# -eq 0 ] && help && exit 1 | 58 | [ $# -eq 0 ] && help && exit 1 |
96 | 59 | ||
97 | case $2 in | 60 | case $2 in |
@@ -110,9 +73,6 @@ export) | |||
110 | import) | 73 | import) |
111 | do_import $2 | 74 | do_import $2 |
112 | ;; | 75 | ;; |
113 | migrate_localcount) | ||
114 | do_migrate_localcount | ||
115 | ;; | ||
116 | *) | 76 | *) |
117 | help | 77 | help |
118 | exit 1 | 78 | exit 1 |
diff --git a/scripts/buildstats-diff b/scripts/buildstats-diff index c9aa76a8fa..df1df432f1 100755 --- a/scripts/buildstats-diff +++ b/scripts/buildstats-diff | |||
@@ -12,6 +12,7 @@ import glob | |||
12 | import logging | 12 | import logging |
13 | import math | 13 | import math |
14 | import os | 14 | import os |
15 | import pathlib | ||
15 | import sys | 16 | import sys |
16 | from operator import attrgetter | 17 | from operator import attrgetter |
17 | 18 | ||
@@ -251,11 +252,32 @@ Script for comparing buildstats of two separate builds.""" | |||
251 | "average over them") | 252 | "average over them") |
252 | parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[], | 253 | parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[], |
253 | help="Only include TASK in report. May be specified multiple times") | 254 | help="Only include TASK in report. May be specified multiple times") |
254 | parser.add_argument('buildstats1', metavar='BUILDSTATS1', help="'Left' buildstat") | 255 | parser.add_argument('buildstats1', metavar='BUILDSTATS1', nargs="?", help="'Left' buildstat") |
255 | parser.add_argument('buildstats2', metavar='BUILDSTATS2', help="'Right' buildstat") | 256 | parser.add_argument('buildstats2', metavar='BUILDSTATS2', nargs="?", help="'Right' buildstat") |
256 | 257 | ||
257 | args = parser.parse_args(argv) | 258 | args = parser.parse_args(argv) |
258 | 259 | ||
260 | if args.buildstats1 and args.buildstats2: | ||
261 | # Both paths specified | ||
262 | pass | ||
263 | elif args.buildstats1 or args.buildstats2: | ||
264 | # Just one path specified, this is an error | ||
265 | parser.print_usage(sys.stderr) | ||
266 | print("Either specify two buildstats paths, or none to use the last two paths.", file=sys.stderr) | ||
267 | sys.exit(1) | ||
268 | else: | ||
269 | # No paths specified, try to find the last two buildstats | ||
270 | try: | ||
271 | buildstats_dir = pathlib.Path(os.environ["BUILDDIR"]) / "tmp" / "buildstats" | ||
272 | paths = sorted(buildstats_dir.iterdir()) | ||
273 | args.buildstats2 = paths.pop() | ||
274 | args.buildstats1 = paths.pop() | ||
275 | print(f"Comparing {args.buildstats1} -> {args.buildstats2}\n") | ||
276 | except KeyError: | ||
277 | parser.print_usage(sys.stderr) | ||
278 | print("Build environment has not been configured, cannot find buildstats", file=sys.stderr) | ||
279 | sys.exit(1) | ||
280 | |||
259 | # We do not nedd/want to read all buildstats if we just want to look at the | 281 | # We do not nedd/want to read all buildstats if we just want to look at the |
260 | # package versions | 282 | # package versions |
261 | if args.ver_diff: | 283 | if args.ver_diff: |
diff --git a/scripts/buildstats-summary b/scripts/buildstats-summary index b10c671b29..cc2a27722a 100755 --- a/scripts/buildstats-summary +++ b/scripts/buildstats-summary | |||
@@ -87,7 +87,11 @@ def main(argv=None) -> int: | |||
87 | ) | 87 | ) |
88 | 88 | ||
89 | parser.add_argument( | 89 | parser.add_argument( |
90 | "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path | 90 | "buildstats", |
91 | metavar="BUILDSTATS", | ||
92 | nargs="?", | ||
93 | type=pathlib.Path, | ||
94 | help="Buildstats file, or latest if not specified", | ||
91 | ) | 95 | ) |
92 | parser.add_argument( | 96 | parser.add_argument( |
93 | "--sort", | 97 | "--sort", |
@@ -116,6 +120,16 @@ def main(argv=None) -> int: | |||
116 | 120 | ||
117 | args = parser.parse_args(argv) | 121 | args = parser.parse_args(argv) |
118 | 122 | ||
123 | # If a buildstats file wasn't specified, try to find the last one | ||
124 | if not args.buildstats: | ||
125 | try: | ||
126 | builddir = pathlib.Path(os.environ["BUILDDIR"]) | ||
127 | buildstats_dir = builddir / "tmp" / "buildstats" | ||
128 | args.buildstats = sorted(buildstats_dir.iterdir())[-1] | ||
129 | except KeyError: | ||
130 | print("Build environment has not been configured, cannot find buildstats") | ||
131 | return 1 | ||
132 | |||
119 | bs = read_buildstats(args.buildstats) | 133 | bs = read_buildstats(args.buildstats) |
120 | dump_buildstats(args, bs) | 134 | dump_buildstats(args, bs) |
121 | 135 | ||
diff --git a/scripts/clean-hashserver-database b/scripts/clean-hashserver-database new file mode 100755 index 0000000000..9fa162c981 --- /dev/null +++ b/scripts/clean-hashserver-database | |||
@@ -0,0 +1,77 @@ | |||
1 | #!/bin/bash | ||
2 | set -euo pipefail | ||
3 | |||
4 | SSTATE_DIR="" | ||
5 | BB_HASHCLIENT="" | ||
6 | BB_HASHSERVER="" | ||
7 | |||
8 | ALIVE_DB_MARK="alive" | ||
9 | CLEAN_DB="false" | ||
10 | THRESHOLD_AGE="3600" | ||
11 | |||
12 | function help() { | ||
13 | cat <<HELP_TEXT | ||
14 | Usage: $0 --sstate-dir path --hashclient path --hashserver-address address \ | ||
15 | [--mark value] [--clean-db] [--threshold-age seconds] | ||
16 | |||
17 | Auxiliary script to remove unused or no longer relevant entries from the hash equivalence database, | ||
18 | based on the files available in the sstate directory. | ||
19 | |||
20 | -h | --help) Show this help message and exit | ||
21 | -a | --hashserver-address) bitbake-hashserver address | ||
22 | -c | --hashclient) Path to bitbake-hashclient | ||
23 | -m | --mark) Marker string to mark database entries | ||
24 | -s | --sstate-dir) Path to the sstate dir | ||
25 | -t | --threshold-age) Remove unused entries older than the given number of seconds (default: 3600) | ||
26 | --clean-db) Remove all unmarked and unused entries from the database | ||
27 | HELP_TEXT | ||
28 | } | ||
29 | |||
30 | function argument_parser() { | ||
31 | while [ $# -gt 0 ]; do | ||
32 | case "$1" in | ||
33 | -h | --help) help; exit 0 ;; | ||
34 | -a | --hashserver-address) BB_HASHSERVER="$2"; shift ;; | ||
35 | -c | --hashclient) BB_HASHCLIENT="$2"; shift ;; | ||
36 | -m | --mark) ALIVE_DB_MARK="$2"; shift ;; | ||
37 | -s | --sstate-dir) SSTATE_DIR="$2"; shift ;; | ||
38 | -t | --threshold-age) THRESHOLD_AGE="$2"; shift ;; | ||
39 | --clean-db) CLEAN_DB="true";; | ||
40 | *) | ||
41 | echo "Argument '$1' is not supported" >&2 | ||
42 | help >&2 | ||
43 | exit 1 | ||
44 | ;; | ||
45 | esac | ||
46 | shift | ||
47 | done | ||
48 | |||
49 | function validate_mandatory_argument() { | ||
50 | local var_value="$1" | ||
51 | local error_message="$2" | ||
52 | |||
53 | if [ -z "$var_value" ]; then | ||
54 | echo "$error_message" | ||
55 | help >&2 | ||
56 | exit 1 | ||
57 | fi | ||
58 | } | ||
59 | |||
60 | validate_mandatory_argument "$SSTATE_DIR" "Please provide the path to the sstate dir." | ||
61 | validate_mandatory_argument "$BB_HASHCLIENT" "Please provide the path to bitbake-hashclient." | ||
62 | validate_mandatory_argument "$BB_HASHSERVER" "Please provide the address of bitbake-hashserver." | ||
63 | } | ||
64 | |||
65 | # -- main code -- | ||
66 | argument_parser "$@" | ||
67 | |||
68 | # Mark all db sstate hashes | ||
69 | find "$SSTATE_DIR" -name "*.tar.zst" | \ | ||
70 | sed 's/.*:\([^_]*\)_.*/unihash \1/' | \ | ||
71 | $BB_HASHCLIENT --address "$BB_HASHSERVER" gc-mark-stream "$ALIVE_DB_MARK" | ||
72 | |||
73 | # Remove unmarked and unused entries | ||
74 | if [ "$CLEAN_DB" = "true" ]; then | ||
75 | $BB_HASHCLIENT --address "$BB_HASHSERVER" gc-sweep "$ALIVE_DB_MARK" | ||
76 | $BB_HASHCLIENT --address "$BB_HASHSERVER" clean-unused "$THRESHOLD_AGE" | ||
77 | fi | ||
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py index 4e194dee3f..13cf12a33f 100755 --- a/scripts/contrib/convert-spdx-licenses.py +++ b/scripts/contrib/convert-spdx-licenses.py | |||
@@ -93,7 +93,7 @@ license_map = { | |||
93 | "Nauman" : "Naumen", | 93 | "Nauman" : "Naumen", |
94 | "tcl" : "TCL", | 94 | "tcl" : "TCL", |
95 | "vim" : "Vim", | 95 | "vim" : "Vim", |
96 | "SGIv1" : "SGI-1", | 96 | "SGIv1" : "SGI-OpenGL", |
97 | } | 97 | } |
98 | 98 | ||
99 | def processfile(fn): | 99 | def processfile(fn): |
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py new file mode 100755 index 0000000000..829cc4cd30 --- /dev/null +++ b/scripts/contrib/improve_kernel_cve_report.py | |||
@@ -0,0 +1,467 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # The script uses another source of CVE information from linux-vulns | ||
6 | # to enrich the cve-summary from cve-check or vex. | ||
7 | # It can also use the list of compiled files from the kernel SPDX to ignore CVEs | ||
8 | # whose affected source files are not compiled into the kernel. | ||
9 | # | ||
10 | # It creates a new json file with updated CVE information | ||
11 | # | ||
12 | # Compiled files can be extracted adding the following in local.conf | ||
13 | # SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1" | ||
14 | # | ||
15 | # Tested with the following CVE sources: | ||
16 | # - https://git.kernel.org/pub/scm/linux/security/vulns.git | ||
17 | # - https://github.com/CVEProject/cvelistV5 | ||
18 | # | ||
19 | # Example: | ||
20 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns | ||
21 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json | ||
22 | # | ||
23 | # SPDX-License-Identifier: GPLv2 | ||
24 | |||
25 | import argparse | ||
26 | import json | ||
27 | import sys | ||
28 | import logging | ||
29 | import glob | ||
30 | import os | ||
31 | import pathlib | ||
32 | from packaging.version import Version | ||
33 | |||
34 | def is_linux_cve(cve_info): | ||
35 | '''Return True if the CVE belongs to Linux''' | ||
36 | if not "affected" in cve_info["containers"]["cna"]: | ||
37 | return False | ||
38 | for affected in cve_info["containers"]["cna"]["affected"]: | ||
39 | if not "product" in affected: | ||
40 | return False | ||
41 | if affected["product"] == "Linux" and affected["vendor"] == "Linux": | ||
42 | return True | ||
43 | return False | ||
44 | |||
45 | def get_kernel_cves(datadir, compiled_files, version): | ||
46 | """ | ||
47 | Get CVEs for the kernel | ||
48 | """ | ||
49 | cves = {} | ||
50 | |||
51 | check_config = len(compiled_files) > 0 | ||
52 | |||
53 | base_version = Version(f"{version.major}.{version.minor}") | ||
54 | |||
55 | # Check all CVEs from kernel vulns | ||
56 | pattern = os.path.join(datadir, '**', "CVE-*.json") | ||
57 | cve_files = glob.glob(pattern, recursive=True) | ||
58 | not_applicable_config = 0 | ||
59 | fixed_as_later_backport = 0 | ||
60 | vulnerable = 0 | ||
61 | not_vulnerable = 0 | ||
62 | for cve_file in sorted(cve_files): | ||
63 | cve_info = {} | ||
64 | with open(cve_file, "r", encoding='ISO-8859-1') as f: | ||
65 | cve_info = json.load(f) | ||
66 | |||
67 | if len(cve_info) == 0: | ||
68 | logging.error("Not valid data in %s. Aborting", cve_file) | ||
69 | break | ||
70 | |||
71 | if not is_linux_cve(cve_info): | ||
72 | continue | ||
73 | cve_id = os.path.basename(cve_file)[:-5] | ||
74 | description = cve_info["containers"]["cna"]["descriptions"][0]["value"] | ||
75 | if cve_file.find("rejected") >= 0: | ||
76 | logging.debug("%s is rejected by the CNA", cve_id) | ||
77 | cves[cve_id] = { | ||
78 | "id": cve_id, | ||
79 | "status": "Ignored", | ||
80 | "detail": "rejected", | ||
81 | "summary": description, | ||
82 | "description": f"Rejected by CNA" | ||
83 | } | ||
84 | continue | ||
85 | if any(elem in cve_file for elem in ["review", "reserved", "testing"]): | ||
86 | continue | ||
87 | |||
88 | is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version) | ||
89 | |||
90 | logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected) | ||
91 | |||
92 | if is_vulnerable is None: | ||
93 | logging.warning("%s doesn't have good metadata", cve_id) | ||
94 | if is_vulnerable: | ||
95 | is_affected = True | ||
96 | affected_files = [] | ||
97 | if check_config: | ||
98 | is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info) | ||
99 | |||
100 | if not is_affected and len(affected_files) > 0: | ||
101 | logging.debug( | ||
102 | "%s - not applicable configuration since affected files not compiled: %s", | ||
103 | cve_id, affected_files) | ||
104 | cves[cve_id] = { | ||
105 | "id": cve_id, | ||
106 | "status": "Ignored", | ||
107 | "detail": "not-applicable-config", | ||
108 | "summary": description, | ||
109 | "description": f"Source code not compiled by config. {affected_files}" | ||
110 | } | ||
111 | not_applicable_config +=1 | ||
112 | # Check if we have backport | ||
113 | else: | ||
114 | if not better_match_last: | ||
115 | fixed_in = last_affected | ||
116 | else: | ||
117 | fixed_in = better_match_last | ||
118 | logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in) | ||
119 | cves[cve_id] = { | ||
120 | "id": cve_id, | ||
121 | "status": "Unpatched", | ||
122 | "detail": "version-in-range", | ||
123 | "summary": description, | ||
124 | "description": f"Needs backporting (fixed from {fixed_in})" | ||
125 | } | ||
126 | vulnerable += 1 | ||
127 | if (better_match_last and | ||
128 | Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version): | ||
129 | fixed_as_later_backport += 1 | ||
130 | # Not vulnerable | ||
131 | else: | ||
132 | if not first_affected: | ||
133 | logging.debug("%s - not known affected %s", | ||
134 | cve_id, | ||
135 | better_match_last) | ||
136 | cves[cve_id] = { | ||
137 | "id": cve_id, | ||
138 | "status": "Patched", | ||
139 | "detail": "version-not-in-range", | ||
140 | "summary": description, | ||
141 | "description": "No CPE match" | ||
142 | } | ||
143 | not_vulnerable += 1 | ||
144 | continue | ||
145 | backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}") | ||
146 | if version < first_affected: | ||
147 | logging.debug('%s - fixed-version: only affects %s onwards', | ||
148 | cve_id, | ||
149 | first_affected) | ||
150 | cves[cve_id] = { | ||
151 | "id": cve_id, | ||
152 | "status": "Patched", | ||
153 | "detail": "fixed-version", | ||
154 | "summary": description, | ||
155 | "description": f"only affects {first_affected} onwards" | ||
156 | } | ||
157 | not_vulnerable += 1 | ||
158 | elif last_affected <= version: | ||
159 | logging.debug("%s - fixed-version: Fixed from version %s", | ||
160 | cve_id, | ||
161 | last_affected) | ||
162 | cves[cve_id] = { | ||
163 | "id": cve_id, | ||
164 | "status": "Patched", | ||
165 | "detail": "fixed-version", | ||
166 | "summary": description, | ||
167 | "description": f"fixed-version: Fixed from version {last_affected}" | ||
168 | } | ||
169 | not_vulnerable += 1 | ||
170 | elif backport_base == base_version: | ||
171 | logging.debug("%s - cpe-stable-backport: Backported in %s", | ||
172 | cve_id, | ||
173 | better_match_last) | ||
174 | cves[cve_id] = { | ||
175 | "id": cve_id, | ||
176 | "status": "Patched", | ||
177 | "detail": "cpe-stable-backport", | ||
178 | "summary": description, | ||
179 | "description": f"Backported in {better_match_last}" | ||
180 | } | ||
181 | not_vulnerable += 1 | ||
182 | else: | ||
183 | logging.debug("%s - version not affected %s", cve_id, str(affected_versions)) | ||
184 | cves[cve_id] = { | ||
185 | "id": cve_id, | ||
186 | "status": "Patched", | ||
187 | "detail": "version-not-in-range", | ||
188 | "summary": description, | ||
189 | "description": f"Range {affected_versions}" | ||
190 | } | ||
191 | not_vulnerable += 1 | ||
192 | |||
193 | logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config) | ||
194 | logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable) | ||
195 | logging.info("Total vulnerable CVEs: %d", vulnerable) | ||
196 | |||
197 | logging.info("Total CVEs already backported in %s: %s", base_version, | ||
198 | fixed_as_later_backport) | ||
199 | return cves | ||
200 | |||
201 | def read_spdx(spdx_file): | ||
202 | '''Open SPDX file and extract compiled files''' | ||
203 | with open(spdx_file, 'r', encoding='ISO-8859-1') as f: | ||
204 | spdx = json.load(f) | ||
205 | if "spdxVersion" in spdx: | ||
206 | if spdx["spdxVersion"] == "SPDX-2.2": | ||
207 | return read_spdx2(spdx) | ||
208 | if "@graph" in spdx: | ||
209 | return read_spdx3(spdx) | ||
210 | return [] | ||
211 | |||
212 | def read_spdx2(spdx): | ||
213 | ''' | ||
214 | Read spdx2 compiled files from spdx | ||
215 | ''' | ||
216 | cfiles = set() | ||
217 | if 'files' not in spdx: | ||
218 | return cfiles | ||
219 | for item in spdx['files']: | ||
220 | for ftype in item['fileTypes']: | ||
221 | if ftype == "SOURCE": | ||
222 | filename = item["fileName"][item["fileName"].find("/")+1:] | ||
223 | cfiles.add(filename) | ||
224 | return cfiles | ||
225 | |||
226 | def read_spdx3(spdx): | ||
227 | ''' | ||
228 | Read spdx3 compiled files from spdx | ||
229 | ''' | ||
230 | cfiles = set() | ||
231 | for item in spdx["@graph"]: | ||
232 | if "software_primaryPurpose" not in item: | ||
233 | continue | ||
234 | if item["software_primaryPurpose"] == "source": | ||
235 | filename = item['name'][item['name'].find("/")+1:] | ||
236 | cfiles.add(filename) | ||
237 | return cfiles | ||
238 | |||
239 | def check_kernel_compiled_files(compiled_files, cve_info): | ||
240 | """ | ||
241 | Return if a CVE affected us depending on compiled files | ||
242 | """ | ||
243 | files_affected = set() | ||
244 | is_affected = False | ||
245 | |||
246 | for item in cve_info['containers']['cna']['affected']: | ||
247 | if "programFiles" in item: | ||
248 | for f in item['programFiles']: | ||
249 | if f not in files_affected: | ||
250 | files_affected.add(f) | ||
251 | |||
252 | if len(files_affected) > 0: | ||
253 | for f in files_affected: | ||
254 | if f in compiled_files: | ||
255 | logging.debug("File match: %s", f) | ||
256 | is_affected = True | ||
257 | return is_affected, files_affected | ||
258 | |||
259 | def get_cpe_applicability(cve_info, v): | ||
260 | ''' | ||
261 | Check if version v is affected; return (vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions) | ||
262 | ''' | ||
263 | base_branch = Version(f"{v.major}.{v.minor}") | ||
264 | affected = [] | ||
265 | if not 'cpeApplicability' in cve_info["containers"]["cna"]: | ||
266 | return None, None, None, None, None, None | ||
267 | |||
268 | for nodes in cve_info["containers"]["cna"]["cpeApplicability"]: | ||
269 | for node in nodes.values(): | ||
270 | vulnerable = False | ||
271 | matched_branch = False | ||
272 | first_affected = Version("5000") | ||
273 | last_affected = Version("0") | ||
274 | better_match_first = Version("0") | ||
275 | better_match_last = Version("5000") | ||
276 | |||
277 | if len(node[0]['cpeMatch']) == 0: | ||
278 | first_affected = None | ||
279 | last_affected = None | ||
280 | better_match_first = None | ||
281 | better_match_last = None | ||
282 | |||
283 | for cpe_match in node[0]['cpeMatch']: | ||
284 | version_start_including = Version("0") | ||
285 | version_end_excluding = Version("0") | ||
286 | if 'versionStartIncluding' in cpe_match: | ||
287 | version_start_including = Version(cpe_match['versionStartIncluding']) | ||
288 | else: | ||
289 | version_start_including = Version("0") | ||
290 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
291 | if "versionEndExcluding" in cpe_match: | ||
292 | version_end_excluding = Version(cpe_match["versionEndExcluding"]) | ||
293 | else: | ||
294 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
295 | version_end_excluding = Version( | ||
296 | f"{version_start_including.major}.{version_start_including.minor}.5000" | ||
297 | ) | ||
298 | affected.append(f" {version_start_including}-{version_end_excluding}") | ||
299 | # Detect if versionEnd is in fixed in base branch. It has precedence over the rest | ||
300 | branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}") | ||
301 | if branch_end == base_branch: | ||
302 | if version_start_including <= v < version_end_excluding: | ||
303 | vulnerable = cpe_match['vulnerable'] | ||
304 | # If we don't match in our branch, we are not vulnerable, | ||
305 | # since we have a backport | ||
306 | matched_branch = True | ||
307 | better_match_first = version_start_including | ||
308 | better_match_last = version_end_excluding | ||
309 | if version_start_including <= v < version_end_excluding and not matched_branch: | ||
310 | if version_end_excluding < better_match_last: | ||
311 | better_match_first = max(version_start_including, better_match_first) | ||
312 | better_match_last = min(better_match_last, version_end_excluding) | ||
313 | vulnerable = cpe_match['vulnerable'] | ||
314 | matched_branch = True | ||
315 | |||
316 | first_affected = min(version_start_including, first_affected) | ||
317 | last_affected = max(version_end_excluding, last_affected) | ||
318 | # Not a better match, we use the first and last affected instead of the fake .5000 | ||
319 | if vulnerable and better_match_last == Version(f"{base_branch}.5000"): | ||
320 | better_match_last = last_affected | ||
321 | better_match_first = first_affected | ||
322 | return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected | ||
323 | |||
324 | def copy_data(old, new): | ||
325 | '''Update dictionary with new entries, while keeping the old ones''' | ||
326 | for k in new.keys(): | ||
327 | old[k] = new[k] | ||
328 | return old | ||
329 | |||
330 | # Function taken from cve_check.bbclass. Adapted to cve fields | ||
331 | def cve_update(cve_data, cve, entry): | ||
332 | # If no entry, just add it | ||
333 | if cve not in cve_data: | ||
334 | cve_data[cve] = entry | ||
335 | return | ||
336 | # If we are updating, there might be change in the status | ||
337 | if cve_data[cve]['status'] == "Unknown": | ||
338 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
339 | return | ||
340 | if cve_data[cve]['status'] == entry['status']: | ||
341 | return | ||
342 | if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched": | ||
343 | logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve) | ||
344 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
345 | return | ||
346 | if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched": | ||
347 | logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve) | ||
348 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
349 | return | ||
350 | # If we have an "Ignored", it has a priority | ||
351 | if cve_data[cve]['status'] == "Ignored": | ||
352 | logging.debug("CVE %s not updating because Ignored", cve) | ||
353 | return | ||
354 | # If we have an "Ignored", it has a priority | ||
355 | if entry['status'] == "Ignored": | ||
356 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
357 | logging.debug("CVE entry %s updated from Unpatched to Ignored", cve) | ||
358 | return | ||
359 | logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s", | ||
360 | cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail']) | ||
361 | |||
362 | def main(): | ||
363 | parser = argparse.ArgumentParser( | ||
364 | description="Update cve-summary with kernel compiled files and kernel CVE information" | ||
365 | ) | ||
366 | parser.add_argument( | ||
367 | "-s", | ||
368 | "--spdx", | ||
369 | help="SPDX2/3 for the kernel. Needs to include compiled sources", | ||
370 | ) | ||
371 | parser.add_argument( | ||
372 | "--datadir", | ||
373 | type=pathlib.Path, | ||
374 | help="Directory where CVE data is", | ||
375 | required=True | ||
376 | ) | ||
377 | parser.add_argument( | ||
378 | "--old-cve-report", | ||
379 | help="CVE report to update. (Optional)", | ||
380 | ) | ||
381 | parser.add_argument( | ||
382 | "--kernel-version", | ||
383 | help="Kernel version. Needed if old cve_report is not provided (Optional)", | ||
384 | type=Version | ||
385 | ) | ||
386 | parser.add_argument( | ||
387 | "--new-cve-report", | ||
388 | help="Output file", | ||
389 | default="cve-summary-enhance.json" | ||
390 | ) | ||
391 | parser.add_argument( | ||
392 | "-D", | ||
393 | "--debug", | ||
394 | help='Enable debug ', | ||
395 | action="store_true") | ||
396 | |||
397 | args = parser.parse_args() | ||
398 | |||
399 | if args.debug: | ||
400 | log_level=logging.DEBUG | ||
401 | else: | ||
402 | log_level=logging.INFO | ||
403 | logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level) | ||
404 | |||
405 | if not args.kernel_version and not args.old_cve_report: | ||
406 | parser.error("either --kernel-version or --old-cve-report are needed") | ||
407 | return -1 | ||
408 | |||
409 | # by default we don't check the compiled files, unless provided | ||
410 | compiled_files = [] | ||
411 | if args.spdx: | ||
412 | compiled_files = read_spdx(args.spdx) | ||
413 | logging.info("Total compiled files %d", len(compiled_files)) | ||
414 | |||
415 | if args.old_cve_report: | ||
416 | with open(args.old_cve_report, encoding='ISO-8859-1') as f: | ||
417 | cve_report = json.load(f) | ||
418 | else: | ||
419 | #If summary not provided, we create one | ||
420 | cve_report = { | ||
421 | "version": "1", | ||
422 | "package": [ | ||
423 | { | ||
424 | "name": "linux-yocto", | ||
425 | "version": str(args.kernel_version), | ||
426 | "products": [ | ||
427 | { | ||
428 | "product": "linux_kernel", | ||
429 | "cvesInRecord": "Yes" | ||
430 | } | ||
431 | ], | ||
432 | "issue": [] | ||
433 | } | ||
434 | ] | ||
435 | } | ||
436 | |||
437 | for pkg in cve_report['package']: | ||
438 | is_kernel = False | ||
439 | for product in pkg['products']: | ||
440 | if product['product'] == "linux_kernel": | ||
441 | is_kernel=True | ||
442 | if not is_kernel: | ||
443 | continue | ||
444 | |||
445 | kernel_cves = get_kernel_cves(args.datadir, | ||
446 | compiled_files, | ||
447 | Version(pkg["version"])) | ||
448 | logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves)) | ||
449 | cves = {issue["id"]: issue for issue in pkg["issue"]} | ||
450 | logging.info("Total kernel before processing cves: %s", len(cves)) | ||
451 | |||
452 | for cve in kernel_cves: | ||
453 | cve_update(cves, cve, kernel_cves[cve]) | ||
454 | |||
455 | pkg["issue"] = [] | ||
456 | for cve in sorted(cves): | ||
457 | pkg["issue"].extend([cves[cve]]) | ||
458 | logging.info("Total kernel cves after processing: %s", len(pkg['issue'])) | ||
459 | |||
460 | with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f: | ||
461 | json.dump(cve_report, f, indent=2) | ||
462 | |||
463 | return 0 | ||
464 | |||
465 | if __name__ == "__main__": | ||
466 | sys.exit(main()) | ||
467 | |||
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh new file mode 100755 index 0000000000..31caaf339d --- /dev/null +++ b/scripts/contrib/make-spdx-bindings.sh | |||
@@ -0,0 +1,12 @@ | |||
1 | #! /bin/sh | ||
2 | # | ||
3 | # SPDX-License-Identifier: MIT | ||
4 | |||
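# Note: assumes the shacl2code tool is available on PATH (it can typically be
# installed with e.g. "pip install shacl2code"); the generated bindings are
# written to meta/lib/oe/spdx30.py as shown below.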
5 | THIS_DIR="$(dirname "$0")" | ||
6 | |||
7 | VERSION="3.0.1" | ||
8 | |||
9 | shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \ | ||
10 | --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \ | ||
11 | --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \ | ||
12 | python -o $THIS_DIR/../../meta/lib/oe/spdx30.py | ||
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore new file mode 100644 index 0000000000..285851c984 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/.gitignore | |||
@@ -0,0 +1,8 @@ | |||
1 | *.spdx.json | ||
2 | *.pyc | ||
3 | *.bak | ||
4 | *.swp | ||
5 | *.swo | ||
6 | *.swn | ||
7 | venv/* | ||
8 | .venv/* | ||
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md new file mode 100644 index 0000000000..44f76eacd8 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/README.md | |||
@@ -0,0 +1,24 @@ | |||
1 | # OE Image Files from SBoM | ||
2 | |||
3 | This is an example Python script that lists the packaged files with their | ||
4 | checksums, based on the SPDX 3.0.1 SBoM. | ||
5 | |||
6 | It can be used as a template for other programs that investigate output based on | ||
7 | OE SPDX SBoMs. | ||
8 | |||
9 | ## Installation | ||
10 | |||
11 | This project can be installed using a virtual environment: | ||
12 | ``` | ||
13 | python3 -m venv .venv | ||
14 | . .venv/bin/activate | ||
15 | python3 -m pip install -e '.[dev]' | ||
16 | ``` | ||
17 | |||
18 | ## Usage | ||
19 | |||
20 | After installing, the `oe-image-files` program can be used to show the files, e.g.: | ||
21 | |||
22 | ``` | ||
23 | oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json | ||
24 | ``` | ||
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml new file mode 100644 index 0000000000..3fab5dd605 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml | |||
@@ -0,0 +1,23 @@ | |||
1 | [project] | ||
2 | name = "oe-image-files" | ||
3 | description = "Displays all packaged files on the root file system" | ||
4 | dynamic = ["version"] | ||
5 | requires-python = ">= 3.8" | ||
6 | readme = "README.md" | ||
7 | |||
8 | dependencies = [ | ||
9 | "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179", | ||
10 | ] | ||
11 | |||
12 | [project.scripts] | ||
13 | oe-image-files = "oe_image_files:main" | ||
14 | |||
15 | [build-system] | ||
16 | requires = ["hatchling"] | ||
17 | build-backend = "hatchling.build" | ||
18 | |||
19 | [tool.hatch.version] | ||
20 | path = "src/oe_image_files/version.py" | ||
21 | |||
22 | [tool.hatch.metadata] | ||
23 | allow-direct-references = true | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py new file mode 100644 index 0000000000..c28a133f2d --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py | |||
@@ -0,0 +1 @@ | |||
from .main import main | |||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py new file mode 100644 index 0000000000..8476bf6369 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py | |||
@@ -0,0 +1,86 @@ | |||
1 | # SPDX-License-Identifier: MIT | ||
2 | |||
3 | import argparse | ||
4 | from pathlib import Path | ||
5 | |||
6 | |||
7 | from spdx_python_model import v3_0_1 as spdx_3_0_1 | ||
8 | from .version import VERSION | ||
9 | |||
10 | |||
11 | def main(): | ||
12 | parser = argparse.ArgumentParser( | ||
13 | description="Show the packaged files and checksums in an OE image from the SPDX SBoM" | ||
14 | ) | ||
15 | parser.add_argument("file", help="SPDX 3 input file", type=Path) | ||
16 | parser.add_argument("--version", "-V", action="version", version=VERSION) | ||
17 | |||
18 | args = parser.parse_args() | ||
19 | |||
20 | # Load SPDX data from file into a new object set | ||
21 | objset = spdx_3_0_1.SHACLObjectSet() | ||
22 | with args.file.open("r") as f: | ||
23 | d = spdx_3_0_1.JSONLDDeserializer() | ||
24 | d.read(f, objset) | ||
25 | |||
26 | # Find the top level SPDX Document object | ||
27 | for o in objset.foreach_type(spdx_3_0_1.SpdxDocument): | ||
28 | doc = o | ||
29 | break | ||
30 | else: | ||
31 | print("ERROR: No SPDX Document found!") | ||
32 | return 1 | ||
33 | |||
34 | # Find the root SBoM in the document | ||
35 | for o in doc.rootElement: | ||
36 | if isinstance(o, spdx_3_0_1.software_Sbom): | ||
37 | sbom = o | ||
38 | break | ||
39 | else: | ||
40 | print("ERROR: SBoM not found in document") | ||
41 | return 1 | ||
42 | |||
43 | # Find the root file system package in the SBoM | ||
44 | for o in sbom.rootElement: | ||
45 | if ( | ||
46 | isinstance(o, spdx_3_0_1.software_Package) | ||
47 | and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive | ||
48 | ): | ||
49 | root_package = o | ||
50 | break | ||
51 | else: | ||
52 | print("ERROR: Package not found in document") | ||
53 | return 1 | ||
54 | |||
55 | # Find all relationships of type "contains" that go FROM the root file | ||
56 | # system | ||
57 | files = [] | ||
58 | for rel in objset.foreach_type(spdx_3_0_1.Relationship): | ||
59 | if not rel.relationshipType == spdx_3_0_1.RelationshipType.contains: | ||
60 | continue | ||
61 | |||
62 | if not rel.from_ is root_package: | ||
63 | continue | ||
64 | |||
65 | # Iterate over all files in the TO of the relationship | ||
66 | for o in rel.to: | ||
67 | if not isinstance(o, spdx_3_0_1.software_File): | ||
68 | continue | ||
69 | |||
70 | # Find the SHA 256 hash of the file (if any) | ||
71 | for h in o.verifiedUsing: | ||
72 | if ( | ||
73 | isinstance(h, spdx_3_0_1.Hash) | ||
74 | and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256 | ||
75 | ): | ||
76 | files.append((o.name, h.hashValue)) | ||
77 | break | ||
78 | else: | ||
79 | files.append((o.name, "")) | ||
80 | |||
81 | # Print files | ||
82 | files.sort(key=lambda x: x[0]) | ||
83 | for name, hash_val in files: | ||
84 | print(f"{name} - {hash_val}") | ||
85 | |||
86 | return 0 | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py new file mode 100644 index 0000000000..901e5110b2 --- /dev/null +++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py | |||
@@ -0,0 +1 @@ | |||
VERSION = "0.0.1" | |||
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py index bceae06561..d8d7b214e5 100755 --- a/scripts/contrib/patchreview.py +++ b/scripts/contrib/patchreview.py | |||
@@ -14,6 +14,10 @@ import pathlib | |||
14 | import re | 14 | import re |
15 | import subprocess | 15 | import subprocess |
16 | 16 | ||
17 | import sys | ||
18 | sys.path.append(os.path.join(sys.path[0], '../../meta/lib')) | ||
19 | import oe.qa | ||
20 | |||
17 | # TODO | 21 | # TODO |
18 | # - option to just list all broken files | 22 | # - option to just list all broken files |
19 | # - test suite | 23 | # - test suite |
@@ -47,7 +51,7 @@ def blame_patch(patch): | |||
47 | return subprocess.check_output(("git", "log", | 51 | return subprocess.check_output(("git", "log", |
48 | "--follow", "--find-renames", "--diff-filter=A", | 52 | "--follow", "--find-renames", "--diff-filter=A", |
49 | "--format=%s (%aN <%aE>)", | 53 | "--format=%s (%aN <%aE>)", |
50 | "--", patch)).decode("utf-8").splitlines() | 54 | "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines() |
51 | 55 | ||
52 | def patchreview(patches): | 56 | def patchreview(patches): |
53 | 57 | ||
@@ -78,12 +82,11 @@ def patchreview(patches): | |||
78 | else: | 82 | else: |
79 | result.missing_sob = True | 83 | result.missing_sob = True |
80 | 84 | ||
81 | |||
82 | # Find the Upstream-Status tag | 85 | # Find the Upstream-Status tag |
83 | match = status_re.search(content) | 86 | match = status_re.search(content) |
84 | if match: | 87 | if match: |
85 | value = match.group(1) | 88 | value = oe.qa.check_upstream_status(patch) |
86 | if value != "Upstream-Status:": | 89 | if value: |
87 | result.malformed_upstream_status = value | 90 | result.malformed_upstream_status = value |
88 | 91 | ||
89 | value = match.group(2).lower() | 92 | value = match.group(2).lower() |
diff --git a/scripts/cve-json-to-text.py b/scripts/cve-json-to-text.py new file mode 100755 index 0000000000..8d309b37e5 --- /dev/null +++ b/scripts/cve-json-to-text.py | |||
@@ -0,0 +1,146 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # SPDX-FileCopyrightText: OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | |||
6 | # CVE results conversion script: JSON format to text | ||
7 | # Derived from cve-report.py from Oniro (MIT, by Huawei Inc) | ||
8 | |||
9 | import sys | ||
10 | import getopt | ||
11 | |||
12 | infile = "in.json" | ||
13 | outfile = "out.txt" | ||
14 | |||
15 | |||
16 | def show_syntax_and_exit(code): | ||
17 | """ | ||
18 | Show the program syntax and exit with the given code | ||
19 | Arguments: | ||
20 | code: the exit code to return | ||
21 | """ | ||
22 | print("Syntax: %s [-h] [-i inputJSONfile][-o outputfile]" % sys.argv[0]) | ||
23 | sys.exit(code) | ||
24 | |||
25 | |||
26 | def exit_error(code, message): | ||
27 | """ | ||
28 | Show the error message and exit with an error | ||
29 | Arguments: | ||
30 | code: the error code to return | ||
31 | message: the message to show | ||
32 | """ | ||
33 | print("Error: %s" % message) | ||
34 | sys.exit(code) | ||
35 | |||
36 | |||
37 | def parse_args(argv): | ||
38 | """ | ||
39 | Parse the program arguments, put options in global variables | ||
40 | Arguments: | ||
41 | argv: program arguments | ||
42 | """ | ||
43 | global infile, outfile | ||
44 | try: | ||
45 | opts, args = getopt.getopt( | ||
46 | argv, "hi:o:", ["help", "input=", "output="] | ||
47 | ) | ||
48 | except getopt.GetoptError: | ||
49 | show_syntax_and_exit(1) | ||
50 | for opt, arg in opts: | ||
51 | if opt in ("-h", "--help"): | ||
52 | show_syntax_and_exit(0) | ||
53 | elif opt in ("-i", "--input"): | ||
54 | infile = arg | ||
55 | elif opt in ("-o", "--output"): | ||
56 | outfile = arg | ||
57 | |||
58 | def load_json(filename): | ||
59 | """ | ||
60 | Load the JSON file, return the resulting dictionary | ||
61 | Arguments: | ||
62 | filename: the file to open | ||
63 | Returns: | ||
64 | Parsed file as a dictionary | ||
65 | """ | ||
66 | import json | ||
67 | |||
68 | out = {} | ||
69 | try: | ||
70 | with open(filename, "r") as f: | ||
71 | out = json.load(f) | ||
72 | except FileNotFoundError: | ||
73 | exit_error(1, "Input file (%s) not found" % (filename)) | ||
74 | except json.decoder.JSONDecodeError as error: | ||
75 | exit_error(1, "Malformed JSON file: %s" % str(error)) | ||
76 | return out | ||
77 | |||
78 | |||
79 | def process_data(filename, data): | ||
80 | """ | ||
81 | Write the resulting text report, one block per CVE issue in each package | ||
82 | Arguments: | ||
83 | filename: the file to write to | ||
84 | data: dictionary from parsing the JSON file | ||
85 | Returns: | ||
86 | None | ||
87 | """ | ||
88 | if not "version" in data or data["version"] != "1": | ||
89 | exit_error(1, "Unrecognized format version number") | ||
90 | if not "package" in data: | ||
91 | exit_error(1, "Mandatory 'package' key not found") | ||
92 | |||
93 | lines = "" | ||
94 | total_issue_count = 0 | ||
95 | for package in data["package"]: | ||
96 | package_info = "" | ||
97 | keys_in_package = {"name", "layer", "version", "issue"} | ||
98 | if keys_in_package - package.keys(): | ||
99 | exit_error( | ||
100 | 1, | ||
101 | "Missing a mandatory key in package: %s" | ||
102 | % (keys_in_package - package.keys()), | ||
103 | ) | ||
104 | |||
105 | package_info += "LAYER: %s\n" % package["layer"] | ||
106 | package_info += "PACKAGE NAME: %s\n" % package["name"] | ||
107 | package_info += "PACKAGE VERSION: %s\n" % package["version"] | ||
108 | |||
109 | for issue in package["issue"]: | ||
110 | keys_in_issue = {"id", "status", "detail"} | ||
111 | if keys_in_issue - issue.keys(): | ||
112 | print("Warning: Missing keys %s in 'issue' for the package '%s'" | ||
113 | % (keys_in_issue - issue.keys(), package["name"])) | ||
114 | |||
115 | lines += package_info | ||
116 | lines += "CVE: %s\n" % issue["id"] | ||
117 | lines += "CVE STATUS: %s\n" % issue["status"] | ||
118 | lines += "CVE DETAIL: %s\n" % issue["detail"] | ||
119 | if "description" in issue: | ||
120 | lines += "CVE DESCRIPTION: %s\n" % issue["description"] | ||
121 | if "summary" in issue: | ||
122 | lines += "CVE SUMMARY: %s\n" % issue["summary"] | ||
123 | if "scorev2" in issue: | ||
124 | lines += "CVSS v2 BASE SCORE: %s\n" % issue["scorev2"] | ||
125 | if "scorev3" in issue: | ||
126 | lines += "CVSS v3 BASE SCORE: %s\n" % issue["scorev3"] | ||
127 | if "scorev4" in issue: | ||
128 | lines += "CVSS v4 BASE SCORE: %s\n" % issue["scorev4"] | ||
129 | if "vector" in issue: | ||
130 | lines += "VECTOR: %s\n" % issue["vector"] | ||
131 | if "vectorString" in issue: | ||
132 | lines += "VECTORSTRING: %s\n" % issue["vectorString"] | ||
133 | lines += "MORE INFORMATION: https://nvd.nist.gov/vuln/detail/%s\n" % issue["id"] | ||
134 | lines += "\n" | ||
135 | |||
136 | with open(filename, "w") as f: | ||
137 | f.write(lines) | ||
138 | |||
139 | def main(argv): | ||
140 | parse_args(argv) | ||
141 | data = load_json(infile) | ||
142 | process_data(outfile, data) | ||
143 | |||
144 | |||
145 | if __name__ == "__main__": | ||
146 | main(sys.argv[1:]) | ||
diff --git a/scripts/devtool b/scripts/devtool index 60ea3e8298..39cebec0d8 100755 --- a/scripts/devtool +++ b/scripts/devtool | |||
@@ -7,19 +7,17 @@ | |||
7 | # SPDX-License-Identifier: GPL-2.0-only | 7 | # SPDX-License-Identifier: GPL-2.0-only |
8 | # | 8 | # |
9 | 9 | ||
10 | import dataclasses | ||
10 | import sys | 11 | import sys |
11 | import os | 12 | import os |
12 | import argparse | 13 | import argparse |
13 | import glob | 14 | import glob |
14 | import re | 15 | import re |
15 | import configparser | 16 | import configparser |
16 | import subprocess | ||
17 | import logging | 17 | import logging |
18 | 18 | ||
19 | basepath = '' | 19 | # This can be removed once our minimum is Python 3.9: https://docs.python.org/3/whatsnew/3.9.html#type-hinting-generics-in-standard-collections |
20 | workspace = {} | 20 | from typing import List |
21 | config = None | ||
22 | context = None | ||
23 | 21 | ||
24 | 22 | ||
25 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | 23 | scripts_path = os.path.dirname(os.path.realpath(__file__)) |
@@ -30,16 +28,16 @@ import scriptutils | |||
30 | import argparse_oe | 28 | import argparse_oe |
31 | logger = scriptutils.logger_create('devtool') | 29 | logger = scriptutils.logger_create('devtool') |
32 | 30 | ||
33 | plugins = [] | ||
34 | 31 | ||
35 | 32 | class ConfigHandler: | |
36 | class ConfigHandler(object): | 33 | basepath = None |
37 | config_file = '' | 34 | config_file = '' |
38 | config_obj = None | 35 | config_obj = None |
39 | init_path = '' | 36 | init_path = '' |
40 | workspace_path = '' | 37 | workspace_path = '' |
41 | 38 | ||
42 | def __init__(self, filename): | 39 | def __init__(self, basepath, filename): |
40 | self.basepath = basepath | ||
43 | self.config_file = filename | 41 | self.config_file = filename |
44 | self.config_obj = configparser.ConfigParser() | 42 | self.config_obj = configparser.ConfigParser() |
45 | 43 | ||
@@ -47,7 +45,7 @@ class ConfigHandler(object): | |||
47 | try: | 45 | try: |
48 | ret = self.config_obj.get(section, option) | 46 | ret = self.config_obj.get(section, option) |
49 | except (configparser.NoOptionError, configparser.NoSectionError): | 47 | except (configparser.NoOptionError, configparser.NoSectionError): |
50 | if default != None: | 48 | if default is not None: |
51 | ret = default | 49 | ret = default |
52 | else: | 50 | else: |
53 | raise | 51 | raise |
@@ -59,14 +57,14 @@ class ConfigHandler(object): | |||
59 | 57 | ||
60 | if self.config_obj.has_option('General', 'init_path'): | 58 | if self.config_obj.has_option('General', 'init_path'): |
61 | pth = self.get('General', 'init_path') | 59 | pth = self.get('General', 'init_path') |
62 | self.init_path = os.path.join(basepath, pth) | 60 | self.init_path = os.path.join(self.basepath, pth) |
63 | if not os.path.exists(self.init_path): | 61 | if not os.path.exists(self.init_path): |
64 | logger.error('init_path %s specified in config file cannot be found' % pth) | 62 | logger.error('init_path %s specified in config file cannot be found' % pth) |
65 | return False | 63 | return False |
66 | else: | 64 | else: |
67 | self.config_obj.add_section('General') | 65 | self.config_obj.add_section('General') |
68 | 66 | ||
69 | self.workspace_path = self.get('General', 'workspace_path', os.path.join(basepath, 'workspace')) | 67 | self.workspace_path = self.get('General', 'workspace_path', os.path.join(self.basepath, 'workspace')) |
70 | return True | 68 | return True |
71 | 69 | ||
72 | 70 | ||
@@ -81,27 +79,29 @@ class ConfigHandler(object): | |||
81 | self.config_obj.add_section(section) | 79 | self.config_obj.add_section(section) |
82 | self.config_obj.set(section, option, value) | 80 | self.config_obj.set(section, option, value) |
83 | 81 | ||
82 | |||
83 | @dataclasses.dataclass | ||
84 | class Context: | 84 | class Context: |
85 | def __init__(self, **kwargs): | 85 | fixed_setup: bool |
86 | self.__dict__.update(kwargs) | 86 | config: ConfigHandler |
87 | pluginpaths: List[str] | ||
87 | 88 | ||
88 | 89 | ||
89 | def read_workspace(): | 90 | def read_workspace(basepath, context): |
90 | global workspace | ||
91 | workspace = {} | 91 | workspace = {} |
92 | if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')): | 92 | if not os.path.exists(os.path.join(context.config.workspace_path, 'conf', 'layer.conf')): |
93 | if context.fixed_setup: | 93 | if context.fixed_setup: |
94 | logger.error("workspace layer not set up") | 94 | logger.error("workspace layer not set up") |
95 | sys.exit(1) | 95 | sys.exit(1) |
96 | else: | 96 | else: |
97 | logger.info('Creating workspace layer in %s' % config.workspace_path) | 97 | logger.info('Creating workspace layer in %s' % context.config.workspace_path) |
98 | _create_workspace(config.workspace_path, config, basepath) | 98 | _create_workspace(context.config.workspace_path, basepath) |
99 | if not context.fixed_setup: | 99 | if not context.fixed_setup: |
100 | _enable_workspace_layer(config.workspace_path, config, basepath) | 100 | _enable_workspace_layer(context.config.workspace_path, context.config, basepath) |
101 | 101 | ||
102 | logger.debug('Reading workspace in %s' % config.workspace_path) | 102 | logger.debug('Reading workspace in %s' % context.config.workspace_path) |
103 | externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$') | 103 | externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$') |
104 | for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')): | 104 | for fn in glob.glob(os.path.join(context.config.workspace_path, 'appends', '*.bbappend')): |
105 | with open(fn, 'r') as f: | 105 | with open(fn, 'r') as f: |
106 | pnvalues = {} | 106 | pnvalues = {} |
107 | pn = None | 107 | pn = None |
@@ -112,7 +112,7 @@ def read_workspace(): | |||
112 | pn = res.group(2) or recipepn | 112 | pn = res.group(2) or recipepn |
113 | # Find the recipe file within the workspace, if any | 113 | # Find the recipe file within the workspace, if any |
114 | bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*') | 114 | bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*') |
115 | recipefile = glob.glob(os.path.join(config.workspace_path, | 115 | recipefile = glob.glob(os.path.join(context.config.workspace_path, |
116 | 'recipes', | 116 | 'recipes', |
117 | recipepn, | 117 | recipepn, |
118 | bbfile)) | 118 | bbfile)) |
@@ -126,13 +126,15 @@ def read_workspace(): | |||
126 | if pnvalues: | 126 | if pnvalues: |
127 | if not pn: | 127 | if not pn: |
128 | raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. " | 128 | raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. " |
129 | "Maybe still using old syntax?" % config.workspace_path) | 129 | "Maybe still using old syntax?" % context.config.workspace_path) |
130 | if not pnvalues.get('srctreebase', None): | 130 | if not pnvalues.get('srctreebase', None): |
131 | pnvalues['srctreebase'] = pnvalues['srctree'] | 131 | pnvalues['srctreebase'] = pnvalues['srctree'] |
132 | logger.debug('Found recipe %s' % pnvalues) | 132 | logger.debug('Found recipe %s' % pnvalues) |
133 | workspace[pn] = pnvalues | 133 | workspace[pn] = pnvalues |
134 | 134 | ||
135 | def create_workspace(args, config, basepath, workspace): | 135 | return workspace |
136 | |||
137 | def create_workspace(args, config, basepath, _workspace): | ||
136 | if args.layerpath: | 138 | if args.layerpath: |
137 | workspacedir = os.path.abspath(args.layerpath) | 139 | workspacedir = os.path.abspath(args.layerpath) |
138 | else: | 140 | else: |
@@ -140,12 +142,12 @@ def create_workspace(args, config, basepath, workspace): | |||
140 | layerseries = None | 142 | layerseries = None |
141 | if args.layerseries: | 143 | if args.layerseries: |
142 | layerseries = args.layerseries | 144 | layerseries = args.layerseries |
143 | _create_workspace(workspacedir, config, basepath, layerseries) | 145 | _create_workspace(workspacedir, basepath, layerseries) |
144 | if not args.create_only: | 146 | if not args.create_only: |
145 | _enable_workspace_layer(workspacedir, config, basepath) | 147 | _enable_workspace_layer(workspacedir, config, basepath) |
146 | 148 | ||
147 | def _create_workspace(workspacedir, config, basepath, layerseries=None): | 149 | def _create_workspace(workspacedir, basepath, layerseries=None): |
148 | import bb | 150 | import bb.utils |
149 | 151 | ||
150 | confdir = os.path.join(workspacedir, 'conf') | 152 | confdir = os.path.join(workspacedir, 'conf') |
151 | if os.path.exists(os.path.join(confdir, 'layer.conf')): | 153 | if os.path.exists(os.path.join(confdir, 'layer.conf')): |
@@ -190,7 +192,7 @@ def _create_workspace(workspacedir, config, basepath, layerseries=None): | |||
190 | 192 | ||
191 | def _enable_workspace_layer(workspacedir, config, basepath): | 193 | def _enable_workspace_layer(workspacedir, config, basepath): |
192 | """Ensure the workspace layer is in bblayers.conf""" | 194 | """Ensure the workspace layer is in bblayers.conf""" |
193 | import bb | 195 | import bb.utils |
194 | bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf') | 196 | bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf') |
195 | if not os.path.exists(bblayers_conf): | 197 | if not os.path.exists(bblayers_conf): |
196 | logger.error('Unable to find bblayers.conf') | 198 | logger.error('Unable to find bblayers.conf') |
@@ -209,15 +211,9 @@ def _enable_workspace_layer(workspacedir, config, basepath): | |||
209 | 211 | ||
210 | 212 | ||
211 | def main(): | 213 | def main(): |
212 | global basepath | ||
213 | global config | ||
214 | global context | ||
215 | |||
216 | if sys.getfilesystemencoding() != "utf-8": | 214 | if sys.getfilesystemencoding() != "utf-8": |
217 | sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") | 215 | sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") |
218 | 216 | ||
219 | context = Context(fixed_setup=False) | ||
220 | |||
221 | # Default basepath | 217 | # Default basepath |
222 | basepath = os.path.dirname(os.path.abspath(__file__)) | 218 | basepath = os.path.dirname(os.path.abspath(__file__)) |
223 | 219 | ||
@@ -242,21 +238,23 @@ def main(): | |||
242 | elif global_args.quiet: | 238 | elif global_args.quiet: |
243 | logger.setLevel(logging.ERROR) | 239 | logger.setLevel(logging.ERROR) |
244 | 240 | ||
241 | is_fixed_setup = False | ||
242 | |||
245 | if global_args.basepath: | 243 | if global_args.basepath: |
246 | # Override | 244 | # Override |
247 | basepath = global_args.basepath | 245 | basepath = global_args.basepath |
248 | if os.path.exists(os.path.join(basepath, '.devtoolbase')): | 246 | if os.path.exists(os.path.join(basepath, '.devtoolbase')): |
249 | context.fixed_setup = True | 247 | is_fixed_setup = True |
250 | else: | 248 | else: |
251 | pth = basepath | 249 | pth = basepath |
252 | while pth != '' and pth != os.sep: | 250 | while pth != '' and pth != os.sep: |
253 | if os.path.exists(os.path.join(pth, '.devtoolbase')): | 251 | if os.path.exists(os.path.join(pth, '.devtoolbase')): |
254 | context.fixed_setup = True | 252 | is_fixed_setup = True |
255 | basepath = pth | 253 | basepath = pth |
256 | break | 254 | break |
257 | pth = os.path.dirname(pth) | 255 | pth = os.path.dirname(pth) |
258 | 256 | ||
259 | if not context.fixed_setup: | 257 | if not is_fixed_setup: |
260 | basepath = os.environ.get('BUILDDIR') | 258 | basepath = os.environ.get('BUILDDIR') |
261 | if not basepath: | 259 | if not basepath: |
262 | logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)") | 260 | logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)") |
@@ -264,10 +262,9 @@ def main(): | |||
264 | 262 | ||
265 | logger.debug('Using basepath %s' % basepath) | 263 | logger.debug('Using basepath %s' % basepath) |
266 | 264 | ||
267 | config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf')) | 265 | config = ConfigHandler(basepath, os.path.join(basepath, 'conf', 'devtool.conf')) |
268 | if not config.read(): | 266 | if not config.read(): |
269 | return -1 | 267 | return -1 |
270 | context.config = config | ||
271 | 268 | ||
272 | bitbake_subdir = config.get('General', 'bitbake_subdir', '') | 269 | bitbake_subdir = config.get('General', 'bitbake_subdir', '') |
273 | if bitbake_subdir: | 270 | if bitbake_subdir: |
@@ -289,6 +286,7 @@ def main(): | |||
289 | scriptutils.logger_setup_color(logger, global_args.color) | 286 | scriptutils.logger_setup_color(logger, global_args.color) |
290 | 287 | ||
291 | if global_args.bbpath is None: | 288 | if global_args.bbpath is None: |
289 | import bb | ||
292 | try: | 290 | try: |
293 | tinfoil = setup_tinfoil(config_only=True, basepath=basepath) | 291 | tinfoil = setup_tinfoil(config_only=True, basepath=basepath) |
294 | try: | 292 | try: |
@@ -300,7 +298,10 @@ def main(): | |||
300 | 298 | ||
301 | # Search BBPATH first to allow layers to override plugins in scripts_path | 299 | # Search BBPATH first to allow layers to override plugins in scripts_path |
302 | pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]] | 300 | pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]] |
303 | context.pluginpaths = pluginpaths | 301 | |
302 | context = Context(fixed_setup=is_fixed_setup, config=config, pluginpaths=pluginpaths) | ||
303 | |||
304 | plugins = [] | ||
304 | for pluginpath in pluginpaths: | 305 | for pluginpath in pluginpaths: |
305 | scriptutils.load_plugins(logger, plugins, pluginpath) | 306 | scriptutils.load_plugins(logger, plugins, pluginpath) |
306 | 307 | ||
@@ -331,9 +332,9 @@ def main(): | |||
331 | args = parser.parse_args(unparsed_args, namespace=global_args) | 332 | args = parser.parse_args(unparsed_args, namespace=global_args) |
332 | 333 | ||
333 | try: | 334 | try: |
335 | workspace = {} | ||
334 | if not getattr(args, 'no_workspace', False): | 336 | if not getattr(args, 'no_workspace', False): |
335 | read_workspace() | 337 | workspace = read_workspace(basepath, context) |
336 | |||
337 | ret = args.func(args, config, basepath, workspace) | 338 | ret = args.func(args, config, basepath, workspace) |
338 | except DevtoolError as err: | 339 | except DevtoolError as err: |
339 | if str(err): | 340 | if str(err): |
@@ -341,6 +342,7 @@ def main(): | |||
341 | ret = err.exitcode | 342 | ret = err.exitcode |
342 | except argparse_oe.ArgumentUsageError as ae: | 343 | except argparse_oe.ArgumentUsageError as ae: |
343 | parser.error_subcommand(ae.message, ae.subcommand) | 344 | parser.error_subcommand(ae.message, ae.subcommand) |
345 | ret = 2 | ||
344 | 346 | ||
345 | return ret | 347 | return ret |
346 | 348 | ||
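For illustration only (not part of the patch), a minimal sketch of the pattern the devtool change above moves to: module-level globals are replaced by fields on a Context dataclass that is built once in main() and passed down explicitly, and read_workspace() returns the workspace dict instead of mutating a global. Names mirror the patch; the paths and the ConfigHandler stand-in are hypothetical.

    import dataclasses
    from typing import Any, List

    @dataclasses.dataclass
    class Context:
        fixed_setup: bool
        config: Any               # a ConfigHandler instance in the real script
        pluginpaths: List[str]

    def read_workspace(basepath, context):
        # Build and return the workspace dict rather than assigning a module-level global
        workspace = {}
        # ... scan context.config.workspace_path for appends/*.bbappend ...
        return workspace

    # In main(): construct the context once, then pass it (plus basepath) down explicitly
    ctx = Context(fixed_setup=False, config=None, pluginpaths=[])
    workspace = read_workspace("/path/to/build", ctx)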
diff --git a/scripts/gen-site-config b/scripts/gen-site-config deleted file mode 100755 index 727b809c0f..0000000000 --- a/scripts/gen-site-config +++ /dev/null | |||
@@ -1,43 +0,0 @@ | |||
1 | #! /bin/sh | ||
2 | # Copyright (c) 2005-2008 Wind River Systems, Inc. | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | cat << EOF | ||
8 | AC_PREREQ(2.57) | ||
9 | AC_INIT([site_wide],[1.0.0]) | ||
10 | |||
11 | EOF | ||
12 | |||
13 | # Disable as endian is set in the default config | ||
14 | #echo AC_C_BIGENDIAN | ||
15 | #echo | ||
16 | |||
17 | if [ -e $1/types ] ; then | ||
18 | while read type ; do | ||
19 | echo "AC_CHECK_SIZEOF([$type])" | ||
20 | done < $1/types | ||
21 | |||
22 | echo | ||
23 | fi | ||
24 | |||
25 | if [ -e $1/funcs ]; then | ||
26 | while read func ; do | ||
27 | echo "AC_CHECK_FUNCS([$func])" | ||
28 | done < $1/funcs | ||
29 | |||
30 | echo | ||
31 | fi | ||
32 | |||
33 | if [ -e $1/headers ]; then | ||
34 | while read header ; do | ||
35 | echo "AC_CHECK_HEADERS([$header])" | ||
36 | done < $1/headers | ||
37 | |||
38 | echo | ||
39 | fi | ||
40 | |||
41 | cat << EOF | ||
42 | AC_OUTPUT | ||
43 | EOF | ||
diff --git a/scripts/install-buildtools b/scripts/install-buildtools index 2218f3ffac..aa23942858 100755 --- a/scripts/install-buildtools +++ b/scripts/install-buildtools | |||
@@ -56,9 +56,9 @@ PROGNAME = 'install-buildtools' | |||
56 | logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) | 56 | logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) |
57 | 57 | ||
58 | DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') | 58 | DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') |
59 | DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto' | 59 | DEFAULT_BASE_URL = 'https://downloads.yoctoproject.org/releases/yocto' |
60 | DEFAULT_RELEASE = 'yocto-4.1' | 60 | DEFAULT_RELEASE = 'yocto-5.2.1' |
61 | DEFAULT_INSTALLER_VERSION = '4.1' | 61 | DEFAULT_INSTALLER_VERSION = '5.2.1' |
62 | DEFAULT_BUILDDATE = '202110XX' | 62 | DEFAULT_BUILDDATE = '202110XX' |
63 | 63 | ||
64 | # Python version sanity check | 64 | # Python version sanity check |
@@ -102,6 +102,16 @@ def sha256_file(filename): | |||
102 | import hashlib | 102 | import hashlib |
103 | return _hasher(hashlib.sha256(), filename) | 103 | return _hasher(hashlib.sha256(), filename) |
104 | 104 | ||
105 | def remove_quotes(var): | ||
106 | """ | ||
107 | If a variable starts and ends with double quotes, remove them. | ||
108 | Assumption: if a variable starts with double quotes, it must also | ||
109 | end with them. | ||
110 | """ | ||
111 | if var[0] == '"': | ||
112 | var = var[1:-1] | ||
113 | return var | ||
114 | |||
105 | 115 | ||
106 | def main(): | 116 | def main(): |
107 | global DEFAULT_INSTALL_DIR | 117 | global DEFAULT_INSTALL_DIR |
@@ -117,7 +127,8 @@ def main(): | |||
117 | 127 | ||
118 | parser = argparse.ArgumentParser( | 128 | parser = argparse.ArgumentParser( |
119 | description="Buildtools installation helper", | 129 | description="Buildtools installation helper", |
120 | add_help=False) | 130 | add_help=False, |
131 | formatter_class=argparse.RawTextHelpFormatter) | ||
121 | parser.add_argument('-u', '--url', | 132 | parser.add_argument('-u', '--url', |
122 | help='URL from where to fetch buildtools SDK installer, not ' | 133 | help='URL from where to fetch buildtools SDK installer, not ' |
123 | 'including filename (optional)\n' | 134 | 'including filename (optional)\n' |
@@ -131,6 +142,9 @@ def main(): | |||
131 | default=DEFAULT_INSTALL_DIR, | 142 | default=DEFAULT_INSTALL_DIR, |
132 | help='directory where buildtools SDK will be installed (optional)', | 143 | help='directory where buildtools SDK will be installed (optional)', |
133 | action='store') | 144 | action='store') |
145 | parser.add_argument('--downloads-directory', | ||
146 | help='use this directory for tarball/checksum downloads and do not erase them (default is a temporary directory which is deleted after unpacking and installing the buildtools)', | ||
147 | action='store') | ||
134 | parser.add_argument('-r', '--release', | 148 | parser.add_argument('-r', '--release', |
135 | default=DEFAULT_RELEASE, | 149 | default=DEFAULT_RELEASE, |
136 | help='Yocto Project release string for SDK which will be ' | 150 | help='Yocto Project release string for SDK which will be ' |
@@ -224,11 +238,14 @@ def main(): | |||
224 | safe_filename = quote(filename) | 238 | safe_filename = quote(filename) |
225 | buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename) | 239 | buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename) |
226 | 240 | ||
227 | tmpsdk_dir = tempfile.mkdtemp() | 241 | sdk_dir = args.downloads_directory or tempfile.mkdtemp() |
242 | os.makedirs(sdk_dir, exist_ok=True) | ||
228 | try: | 243 | try: |
229 | # Fetch installer | 244 | # Fetch installer |
230 | logger.info("Fetching buildtools installer") | 245 | logger.info("Fetching buildtools installer") |
231 | tmpbuildtools = os.path.join(tmpsdk_dir, filename) | 246 | tmpbuildtools = os.path.join(sdk_dir, filename) |
247 | with open(os.path.join(sdk_dir, 'buildtools_url'), 'w') as f: | ||
248 | f.write(buildtools_url) | ||
232 | ret = subprocess.call("wget -q -O %s %s" % | 249 | ret = subprocess.call("wget -q -O %s %s" % |
233 | (tmpbuildtools, buildtools_url), shell=True) | 250 | (tmpbuildtools, buildtools_url), shell=True) |
234 | if ret != 0: | 251 | if ret != 0: |
@@ -238,19 +255,17 @@ def main(): | |||
238 | # Verify checksum | 255 | # Verify checksum |
239 | if args.check: | 256 | if args.check: |
240 | logger.info("Fetching buildtools installer checksum") | 257 | logger.info("Fetching buildtools installer checksum") |
241 | checksum_type = "" | 258 | checksum_type = "sha256sum" |
242 | for checksum_type in ["md5sum", "sha256sum"]: | 259 | checksum_url = "{}.{}".format(buildtools_url, checksum_type) |
243 | check_url = "{}.{}".format(buildtools_url, checksum_type) | 260 | checksum_filename = "{}.{}".format(filename, checksum_type) |
244 | checksum_filename = "{}.{}".format(filename, checksum_type) | 261 | tmpbuildtools_checksum = os.path.join(sdk_dir, checksum_filename) |
245 | tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename) | 262 | with open(os.path.join(sdk_dir, 'checksum_url'), 'w') as f: |
246 | ret = subprocess.call("wget -q -O %s %s" % | 263 | f.write(checksum_url) |
247 | (tmpbuildtools_checksum, check_url), shell=True) | 264 | ret = subprocess.call("wget -q -O %s %s" % |
248 | if ret == 0: | 265 | (tmpbuildtools_checksum, checksum_url), shell=True) |
249 | break | 266 | if ret != 0: |
250 | else: | 267 | logger.error("Could not download file from %s" % checksum_url) |
251 | if ret != 0: | 268 | return ret |
252 | logger.error("Could not download file from %s" % check_url) | ||
253 | return ret | ||
254 | regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$") | 269 | regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$") |
255 | with open(tmpbuildtools_checksum, 'rb') as f: | 270 | with open(tmpbuildtools_checksum, 'rb') as f: |
256 | original = f.read() | 271 | original = f.read() |
@@ -263,10 +278,7 @@ def main(): | |||
263 | logger.error("Filename does not match name in checksum") | 278 | logger.error("Filename does not match name in checksum") |
264 | return 1 | 279 | return 1 |
265 | checksum = m.group('checksum') | 280 | checksum = m.group('checksum') |
266 | if checksum_type == "md5sum": | 281 | checksum_value = sha256_file(tmpbuildtools) |
267 | checksum_value = md5_file(tmpbuildtools) | ||
268 | else: | ||
269 | checksum_value = sha256_file(tmpbuildtools) | ||
270 | if checksum == checksum_value: | 282 | if checksum == checksum_value: |
271 | logger.info("Checksum success") | 283 | logger.info("Checksum success") |
272 | else: | 284 | else: |
@@ -280,7 +292,7 @@ def main(): | |||
280 | os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC) | 292 | os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC) |
281 | logger.debug(os.stat(tmpbuildtools)) | 293 | logger.debug(os.stat(tmpbuildtools)) |
282 | if args.directory: | 294 | if args.directory: |
283 | install_dir = args.directory | 295 | install_dir = os.path.abspath(args.directory) |
284 | ret = subprocess.call("%s -d %s -y" % | 296 | ret = subprocess.call("%s -d %s -y" % |
285 | (tmpbuildtools, install_dir), shell=True) | 297 | (tmpbuildtools, install_dir), shell=True) |
286 | else: | 298 | else: |
@@ -301,7 +313,7 @@ def main(): | |||
301 | if match: | 313 | if match: |
302 | env_var = match.group('env_var') | 314 | env_var = match.group('env_var') |
303 | logger.debug("env_var: %s" % env_var) | 315 | logger.debug("env_var: %s" % env_var) |
304 | env_val = match.group('env_val') | 316 | env_val = remove_quotes(match.group('env_val')) |
305 | logger.debug("env_val: %s" % env_val) | 317 | logger.debug("env_val: %s" % env_val) |
306 | os.environ[env_var] = env_val | 318 | os.environ[env_var] = env_val |
307 | 319 | ||
@@ -343,7 +355,8 @@ def main(): | |||
343 | 355 | ||
344 | finally: | 356 | finally: |
345 | # cleanup tmp directory | 357 | # cleanup tmp directory |
346 | shutil.rmtree(tmpsdk_dir) | 358 | if not args.downloads_directory: |
359 | shutil.rmtree(sdk_dir) | ||
347 | 360 | ||
348 | 361 | ||
349 | if __name__ == '__main__': | 362 | if __name__ == '__main__': |
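For reference (not part of the patch), a minimal sketch of the sha256-only verification the install-buildtools change above settles on: hash the downloaded installer and compare it against the digest in the accompanying .sha256sum file, whose lines follow the same regex used in the script. The file names here are hypothetical.

    import hashlib
    import re

    def sha256_file(path, blocksize=65536):
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(blocksize), b''):
                h.update(block)
        return h.hexdigest()

    # A .sha256sum file contains "<hex digest>  [path/]<filename>"
    regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
    with open('buildtools.sh.sha256sum') as f:
        m = regex.match(f.read().strip())
    ok = m is not None and m.group('checksum') == sha256_file('buildtools.sh')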
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html index 65f1a227ad..86435273cf 100644 --- a/scripts/lib/build_perf/html/measurement_chart.html +++ b/scripts/lib/build_perf/html/measurement_chart.html | |||
@@ -1,50 +1,168 @@ | |||
1 | <script type="text/javascript"> | 1 | <script type="module"> |
2 | chartsDrawing += 1; | 2 | // Get raw data |
3 | google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }}); | 3 | const rawData = [ |
4 | function drawChart_{{ chart_elem_id }}() { | 4 | {% for sample in measurement.samples %} |
5 | var data = new google.visualization.DataTable(); | 5 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'], |
6 | 6 | {% endfor %} | |
7 | // Chart options | 7 | ]; |
8 | var options = { | 8 | |
9 | theme : 'material', | 9 | const convertToMinute = (time) => { |
10 | legend: 'none', | 10 | return time[0]*60 + time[1] + time[2]/60 + time[3]/3600; |
11 | hAxis: { format: '', title: 'Commit number', | 11 | } |
12 | minValue: {{ chart_opts.haxis.min }}, | 12 | |
13 | maxValue: {{ chart_opts.haxis.max }} }, | 13 | // Update value format to either minutes or leave as size value |
14 | {% if measurement.type == 'time' %} | 14 | const updateValue = (value) => { |
15 | vAxis: { format: 'h:mm:ss' }, | 15 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
16 | {% else %} | 16 | return Array.isArray(value) ? convertToMinute(value) : value |
17 | vAxis: { format: '' }, | 17 | } |
18 | {% endif %} | 18 | |
19 | pointSize: 5, | 19 | // Convert raw data to the format: [time, value] |
20 | chartArea: { left: 80, right: 15 }, | 20 | const data = rawData.map(([commit, value, time]) => { |
21 | }; | 21 | return [ |
22 | 22 | // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000. | |
23 | // Define data columns | 23 | new Date(time * 1000).getTime(), |
24 | data.addColumn('number', 'Commit'); | 24 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
25 | data.addColumn('{{ measurement.value_type.gv_data_type }}', | 25 | updateValue(value) |
26 | '{{ measurement.value_type.quantity }}'); | 26 | ] |
27 | // Add data rows | 27 | }); |
28 | data.addRows([ | 28 | |
29 | {% for sample in measurement.samples %} | 29 | const commitCountList = rawData.map(([commit, value, time]) => { |
30 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}], | 30 | return commit |
31 | {% endfor %} | 31 | }); |
32 | ]); | 32 | |
33 | 33 | const commitCountData = rawData.map(([commit, value, time]) => { | |
34 | // Finally, draw the chart | 34 | return updateValue(value) |
35 | chart_div = document.getElementById('{{ chart_elem_id }}'); | 35 | }); |
36 | var chart = new google.visualization.LineChart(chart_div); | 36 | |
37 | google.visualization.events.addListener(chart, 'ready', function () { | 37 | // Set chart options |
38 | //chart_div = document.getElementById('{{ chart_elem_id }}'); | 38 | const option_start_time = { |
39 | //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">'; | 39 | tooltip: { |
40 | png_div = document.getElementById('{{ chart_elem_id }}_png'); | 40 | trigger: 'axis', |
41 | png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>'; | 41 | enterable: true, |
42 | console.log("CHART READY: {{ chart_elem_id }}"); | 42 | position: function (point, params, dom, rect, size) { |
43 | chartsDrawing -= 1; | 43 | return [point[0], '0%']; |
44 | if (chartsDrawing == 0) | 44 | }, |
45 | console.log("ALL CHARTS READY"); | 45 | formatter: function (param) { |
46 | const value = param[0].value[1] | ||
47 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
48 | const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)'); | ||
49 | |||
50 | // Add commit hash to the tooltip as a link | ||
51 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
52 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
53 | const hours = Math.floor(value/60) | ||
54 | const minutes = Math.floor(value % 60) | ||
55 | const seconds = Math.floor((value * 60) % 60) | ||
56 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
57 | } | ||
58 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
59 | ;} | ||
60 | }, | ||
61 | xAxis: { | ||
62 | type: 'time', | ||
63 | }, | ||
64 | yAxis: { | ||
65 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
66 | type: 'value', | ||
67 | min: function(value) { | ||
68 | return Math.round(value.min - 0.5); | ||
69 | }, | ||
70 | max: function(value) { | ||
71 | return Math.round(value.max + 0.5); | ||
72 | } | ||
73 | }, | ||
74 | dataZoom: [ | ||
75 | { | ||
76 | type: 'slider', | ||
77 | xAxisIndex: 0, | ||
78 | filterMode: 'none' | ||
79 | }, | ||
80 | ], | ||
81 | series: [ | ||
82 | { | ||
83 | name: '{{ measurement.value_type.quantity }}', | ||
84 | type: 'line', | ||
85 | symbol: 'none', | ||
86 | data: data | ||
87 | } | ||
88 | ] | ||
89 | }; | ||
90 | |||
91 | const option_commit_count = { | ||
92 | tooltip: { | ||
93 | trigger: 'axis', | ||
94 | enterable: true, | ||
95 | position: function (point, params, dom, rect, size) { | ||
96 | return [point[0], '0%']; | ||
97 | }, | ||
98 | formatter: function (param) { | ||
99 | const value = param[0].value | ||
100 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
101 | const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)'); | ||
102 | // Add commit hash to the tooltip as a link | ||
103 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
104 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
105 | const hours = Math.floor(value/60) | ||
106 | const minutes = Math.floor(value % 60) | ||
107 | const seconds = Math.floor((value * 60) % 60) | ||
108 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
109 | } | ||
110 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}` | ||
111 | ;} | ||
112 | }, | ||
113 | xAxis: { | ||
114 | name: 'Commit count', | ||
115 | type: 'category', | ||
116 | data: commitCountList | ||
117 | }, | ||
118 | yAxis: { | ||
119 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
120 | type: 'value', | ||
121 | min: function(value) { | ||
122 | return Math.round(value.min - 0.5); | ||
123 | }, | ||
124 | max: function(value) { | ||
125 | return Math.round(value.max + 0.5); | ||
126 | } | ||
127 | }, | ||
128 | dataZoom: [ | ||
129 | { | ||
130 | type: 'slider', | ||
131 | xAxisIndex: 0, | ||
132 | filterMode: 'none' | ||
133 | }, | ||
134 | ], | ||
135 | series: [ | ||
136 | { | ||
137 | name: '{{ measurement.value_type.quantity }}', | ||
138 | type: 'line', | ||
139 | symbol: 'none', | ||
140 | data: commitCountData | ||
141 | } | ||
142 | ] | ||
143 | }; | ||
144 | |||
145 | // Draw chart | ||
146 | const draw_chart = (chart_id, option) => { | ||
147 | let chart_name | ||
148 | const chart_div = document.getElementById(chart_id); | ||
149 | // Set dark mode | ||
150 | if (window.matchMedia('(prefers-color-scheme: dark)').matches) { | ||
151 | chart_name= echarts.init(chart_div, 'dark', { | ||
152 | height: 320 | ||
153 | }); | ||
154 | } else { | ||
155 | chart_name= echarts.init(chart_div, null, { | ||
156 | height: 320 | ||
157 | }); | ||
158 | } | ||
159 | // Change chart size with browser resize | ||
160 | window.addEventListener('resize', function() { | ||
161 | chart_name.resize(); | ||
46 | }); | 162 | }); |
47 | chart.draw(data, options); | 163 | return chart_name.setOption(option); |
48 | } | 164 | } |
49 | </script> | ||
50 | 165 | ||
166 | draw_chart('{{ chart_elem_start_time_id }}', option_start_time) | ||
167 | draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count) | ||
168 | </script> | ||
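As a worked example (not part of the patch) of the tooltip arithmetic in the template above: a duration plotted in minutes is split back into hours, minutes and seconds exactly as the ECharts formatter does. Python is used here purely for illustration.

    import math

    value = 92.5                              # duration in minutes, as plotted on the y-axis
    hours = math.floor(value / 60)            # 1
    minutes = math.floor(value % 60)          # 32
    seconds = math.floor((value * 60) % 60)   # 30
    print(f"Duration: {hours}:{minutes}:{seconds}")   # Duration: 1:32:30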
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html index d1ba6f2578..28cd80e738 100644 --- a/scripts/lib/build_perf/html/report.html +++ b/scripts/lib/build_perf/html/report.html | |||
@@ -3,17 +3,14 @@ | |||
3 | <head> | 3 | <head> |
4 | {# Scripts, for visualization#} | 4 | {# Scripts, for visualization#} |
5 | <!--START-OF-SCRIPTS--> | 5 | <!--START-OF-SCRIPTS--> |
6 | <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script> | 6 | <script src=" https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js "></script> |
7 | <script type="text/javascript"> | ||
8 | google.charts.load('current', {'packages':['corechart']}); | ||
9 | var chartsDrawing = 0; | ||
10 | </script> | ||
11 | 7 | ||
12 | {# Render measurement result charts #} | 8 | {# Render measurement result charts #} |
13 | {% for test in test_data %} | 9 | {% for test in test_data %} |
14 | {% if test.status == 'SUCCESS' %} | 10 | {% if test.status == 'SUCCESS' %} |
15 | {% for measurement in test.measurements %} | 11 | {% for measurement in test.measurements %} |
16 | {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %} | 12 | {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %} |
13 | {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %} | ||
17 | {% include 'measurement_chart.html' %} | 14 | {% include 'measurement_chart.html' %} |
18 | {% endfor %} | 15 | {% endfor %} |
19 | {% endif %} | 16 | {% endif %} |
@@ -23,28 +20,29 @@ var chartsDrawing = 0; | |||
23 | 20 | ||
24 | {# Styles #} | 21 | {# Styles #} |
25 | <style> | 22 | <style> |
23 | :root { | ||
24 | --text: #000; | ||
25 | --bg: #fff; | ||
26 | --h2heading: #707070; | ||
27 | --link: #0000EE; | ||
28 | --trtopborder: #9ca3af; | ||
29 | --trborder: #e5e7eb; | ||
30 | --chartborder: #f0f0f0; | ||
31 | } | ||
26 | .meta-table { | 32 | .meta-table { |
27 | font-size: 14px; | 33 | font-size: 14px; |
28 | text-align: left; | 34 | text-align: left; |
29 | border-collapse: collapse; | 35 | border-collapse: collapse; |
30 | } | 36 | } |
31 | .meta-table tr:nth-child(even){background-color: #f2f2f2} | ||
32 | meta-table th, .meta-table td { | ||
33 | padding: 4px; | ||
34 | } | ||
35 | .summary { | 37 | .summary { |
36 | margin: 0; | ||
37 | font-size: 14px; | 38 | font-size: 14px; |
38 | text-align: left; | 39 | text-align: left; |
39 | border-collapse: collapse; | 40 | border-collapse: collapse; |
40 | } | 41 | } |
41 | summary th, .meta-table td { | ||
42 | padding: 4px; | ||
43 | } | ||
44 | .measurement { | 42 | .measurement { |
45 | padding: 8px 0px 8px 8px; | 43 | padding: 8px 0px 8px 8px; |
46 | border: 2px solid #f0f0f0; | 44 | border: 2px solid var(--chartborder); |
47 | margin-bottom: 10px; | 45 | margin: 1.5rem 0; |
48 | } | 46 | } |
49 | .details { | 47 | .details { |
50 | margin: 0; | 48 | margin: 0; |
@@ -64,18 +62,97 @@ summary th, .meta-table td { | |||
64 | background-color: #f0f0f0; | 62 | background-color: #f0f0f0; |
65 | margin-left: 10px; | 63 | margin-left: 10px; |
66 | } | 64 | } |
67 | hr { | 65 | .card-container { |
68 | color: #f0f0f0; | 66 | border-bottom-width: 1px; |
67 | padding: 1.25rem 3rem; | ||
68 | box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); | ||
69 | border-radius: 0.25rem; | ||
70 | } | ||
71 | body { | ||
72 | font-family: 'Helvetica', sans-serif; | ||
73 | margin: 3rem 8rem; | ||
74 | background-color: var(--bg); | ||
75 | color: var(--text); | ||
76 | } | ||
77 | h1 { | ||
78 | text-align: center; | ||
69 | } | 79 | } |
70 | h2 { | 80 | h2 { |
71 | font-size: 20px; | 81 | font-size: 1.5rem; |
72 | margin-bottom: 0px; | 82 | margin-bottom: 0px; |
73 | color: #707070; | 83 | color: var(--h2heading); |
84 | padding-top: 1.5rem; | ||
74 | } | 85 | } |
75 | h3 { | 86 | h3 { |
76 | font-size: 16px; | 87 | font-size: 1.3rem; |
77 | margin: 0px; | 88 | margin: 0px; |
78 | color: #707070; | 89 | color: var(--h2heading); |
90 | padding: 1.5rem 0; | ||
91 | } | ||
92 | h4 { | ||
93 | font-size: 14px; | ||
94 | font-weight: lighter; | ||
95 | line-height: 1.2rem; | ||
96 | margin: auto; | ||
97 | padding-top: 1rem; | ||
98 | } | ||
99 | table { | ||
100 | margin-top: 1.5rem; | ||
101 | line-height: 2rem; | ||
102 | } | ||
103 | tr { | ||
104 | border-bottom: 1px solid var(--trborder); | ||
105 | } | ||
106 | tr:first-child { | ||
107 | border-bottom: 1px solid var(--trtopborder); | ||
108 | } | ||
109 | tr:last-child { | ||
110 | border-bottom: none; | ||
111 | } | ||
112 | a { | ||
113 | text-decoration: none; | ||
114 | font-weight: bold; | ||
115 | color: var(--link); | ||
116 | } | ||
117 | a:hover { | ||
118 | color: #8080ff; | ||
119 | } | ||
120 | button { | ||
121 | background-color: #F3F4F6; | ||
122 | border: none; | ||
123 | outline: none; | ||
124 | cursor: pointer; | ||
125 | padding: 10px 12px; | ||
126 | transition: 0.3s; | ||
127 | border-radius: 8px; | ||
128 | color: #3A4353; | ||
129 | } | ||
130 | button:hover { | ||
131 | background-color: #d6d9e0; | ||
132 | } | ||
133 | .tab button.active { | ||
134 | background-color: #d6d9e0; | ||
135 | } | ||
136 | @media (prefers-color-scheme: dark) { | ||
137 | :root { | ||
138 | --text: #e9e8fa; | ||
139 | --bg: #0F0C28; | ||
140 | --h2heading: #B8B7CB; | ||
141 | --link: #87cefa; | ||
142 | --trtopborder: #394150; | ||
143 | --trborder: #212936; | ||
144 | --chartborder: #b1b0bf; | ||
145 | } | ||
146 | button { | ||
147 | background-color: #28303E; | ||
148 | color: #fff; | ||
149 | } | ||
150 | button:hover { | ||
151 | background-color: #545a69; | ||
152 | } | ||
153 | .tab button.active { | ||
154 | background-color: #545a69; | ||
155 | } | ||
79 | } | 156 | } |
80 | </style> | 157 | </style> |
81 | 158 | ||
@@ -83,13 +160,14 @@ h3 { | |||
83 | </head> | 160 | </head> |
84 | 161 | ||
85 | {% macro poky_link(commit) -%} | 162 | {% macro poky_link(commit) -%} |
86 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> | 163 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> |
87 | {%- endmacro %} | 164 | {%- endmacro %} |
88 | 165 | ||
89 | <body><div style="width: 700px"> | 166 | <body><div> |
167 | <h1 style="text-align: center;">Performance Test Report</h1> | ||
90 | {# Test metadata #} | 168 | {# Test metadata #} |
91 | <h2>General</h2> | 169 | <h2>General</h2> |
92 | <hr> | 170 | <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4> |
93 | <table class="meta-table" style="width: 100%"> | 171 | <table class="meta-table" style="width: 100%"> |
94 | <tr> | 172 | <tr> |
95 | <th></th> | 173 | <th></th> |
@@ -112,19 +190,21 @@ h3 { | |||
112 | 190 | ||
113 | {# Test result summary #} | 191 | {# Test result summary #} |
114 | <h2>Test result summary</h2> | 192 | <h2>Test result summary</h2> |
115 | <hr> | 193 | <h4>The test summary presents a thorough breakdown of each test conducted on the branch, including details such as build time and disk space consumption. Additionally, it gives insights into the average time taken for test execution, along with absolute and relative values for a better understanding.</h4> |
116 | <table class="summary" style="width: 100%"> | 194 | <table class="summary" style="width: 100%"> |
195 | <tr> | ||
196 | <th>Test name</th> | ||
197 | <th>Measurement description</th> | ||
198 | <th>Mean value</th> | ||
199 | <th>Absolute difference</th> | ||
200 | <th>Relative difference</th> | ||
201 | </tr> | ||
117 | {% for test in test_data %} | 202 | {% for test in test_data %} |
118 | {% if loop.index is even %} | ||
119 | {% set row_style = 'style="background-color: #f2f2f2"' %} | ||
120 | {% else %} | ||
121 | {% set row_style = 'style="background-color: #ffffff"' %} | ||
122 | {% endif %} | ||
123 | {% if test.status == 'SUCCESS' %} | 203 | {% if test.status == 'SUCCESS' %} |
124 | {% for measurement in test.measurements %} | 204 | {% for measurement in test.measurements %} |
125 | <tr {{ row_style }}> | 205 | <tr {{ row_style }}> |
126 | {% if loop.index == 1 %} | 206 | {% if loop.index == 1 %} |
127 | <td>{{ test.name }}: {{ test.description }}</td> | 207 | <td><a href=#{{test.name}}>{{ test.name }}: {{ test.description }}</a></td> |
128 | {% else %} | 208 | {% else %} |
129 | {# add empty cell in place of the test name#} | 209 | {# add empty cell in place of the test name#} |
130 | <td></td> | 210 | <td></td> |
@@ -153,10 +233,12 @@ h3 { | |||
153 | </table> | 233 | </table> |
154 | 234 | ||
155 | {# Detailed test results #} | 235 | {# Detailed test results #} |
236 | <h2>Test details</h2> | ||
237 | <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4> | ||
156 | {% for test in test_data %} | 238 | {% for test in test_data %} |
157 | <h2>{{ test.name }}: {{ test.description }}</h2> | 239 | <h3 style="color: #000;" id={{test.name}}>{{ test.name }}: {{ test.description }}</h3> |
158 | <hr> | ||
159 | {% if test.status == 'SUCCESS' %} | 240 | {% if test.status == 'SUCCESS' %} |
241 | <div class="card-container"> | ||
160 | {% for measurement in test.measurements %} | 242 | {% for measurement in test.measurements %} |
161 | <div class="measurement"> | 243 | <div class="measurement"> |
162 | <h3>{{ measurement.description }}</h3> | 244 | <h3>{{ measurement.description }}</h3> |
@@ -178,7 +260,18 @@ h3 { | |||
178 | <tr> | 260 | <tr> |
179 | <td style="width: 75%"> | 261 | <td style="width: 75%"> |
180 | {# Linechart #} | 262 | {# Linechart #} |
181 | <div id="{{ test.name }}_{{ measurement.name }}_chart"></div> | 263 | <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks"> |
264 | <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button> | ||
265 | <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button> | ||
266 | </div> | ||
267 | <div class="{{ test.name }}_{{ measurement.name }}_tabcontent"> | ||
268 | <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;"> | ||
269 | <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div> | ||
270 | </div> | ||
271 | <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;"> | ||
272 | <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div> | ||
273 | </div> | ||
274 | </div> | ||
182 | </td> | 275 | </td> |
183 | <td> | 276 | <td> |
184 | {# Measurement statistics #} | 277 | {# Measurement statistics #} |
@@ -275,7 +368,8 @@ h3 { | |||
275 | {% endif %} | 368 | {% endif %} |
276 | {% endif %} | 369 | {% endif %} |
277 | </div> | 370 | </div> |
278 | {% endfor %} | 371 | {% endfor %} |
372 | </div> | ||
279 | {# Unsuccessful test #} | 373 | {# Unsuccessful test #} |
280 | {% else %} | 374 | {% else %} |
281 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} | 375 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} |
@@ -284,6 +378,31 @@ h3 { | |||
284 | <div class="preformatted">{{ test.message }}</div> | 378 | <div class="preformatted">{{ test.message }}</div> |
285 | {% endif %} | 379 | {% endif %} |
286 | {% endfor %} | 380 | {% endfor %} |
287 | </div></body> | 381 | </div> |
288 | </html> | ||
289 | 382 | ||
383 | <script> | ||
384 | function openChart(event, chartType, chartName) { | ||
385 | let i, tabcontents, tablinks | ||
386 | tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`); | ||
387 | tabcontents.forEach((tabcontent) => { | ||
388 | tabcontent.style.display = "none"; | ||
389 | }); | ||
390 | |||
391 | tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`); | ||
392 | tablinks.forEach((tabLink) => { | ||
393 | tabLink.classList.remove('active'); | ||
394 | }); | ||
395 | |||
396 | const targetTab = document.getElementById(chartType) | ||
397 | targetTab.style.display = "block"; | ||
398 | |||
399 | // Call resize on the ECharts instance to redraw the chart | ||
400 | const chartContainer = targetTab.querySelector('div') | ||
401 | echarts.init(chartContainer).resize(); | ||
402 | |||
403 | event.currentTarget.classList.add('active'); | ||
404 | } | ||
405 | </script> | ||
406 | |||
407 | </body> | ||
408 | </html> | ||
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py index ab77424cc7..f4e6a92e09 100644 --- a/scripts/lib/build_perf/report.py +++ b/scripts/lib/build_perf/report.py | |||
@@ -294,7 +294,7 @@ class SizeVal(MeasurementVal): | |||
294 | return "null" | 294 | return "null" |
295 | return self / 1024 | 295 | return self / 1024 |
296 | 296 | ||
297 | def measurement_stats(meas, prefix=''): | 297 | def measurement_stats(meas, prefix='', time=0): |
298 | """Get statistics of a measurement""" | 298 | """Get statistics of a measurement""" |
299 | if not meas: | 299 | if not meas: |
300 | return {prefix + 'sample_cnt': 0, | 300 | return {prefix + 'sample_cnt': 0, |
@@ -319,6 +319,8 @@ def measurement_stats(meas, prefix=''): | |||
319 | stats['quantity'] = val_cls.quantity | 319 | stats['quantity'] = val_cls.quantity |
320 | stats[prefix + 'sample_cnt'] = len(values) | 320 | stats[prefix + 'sample_cnt'] = len(values) |
321 | 321 | ||
322 | # Add start time for both the sysres and disk usage measurement types | ||
323 | start_time = time | ||
322 | mean_val = val_cls(mean(values)) | 324 | mean_val = val_cls(mean(values)) |
323 | min_val = val_cls(min(values)) | 325 | min_val = val_cls(min(values)) |
324 | max_val = val_cls(max(values)) | 326 | max_val = val_cls(max(values)) |
@@ -334,6 +336,7 @@ def measurement_stats(meas, prefix=''): | |||
334 | stats[prefix + 'max'] = max_val | 336 | stats[prefix + 'max'] = max_val |
335 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) | 337 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) |
336 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) | 338 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) |
339 | stats[prefix + 'start_time'] = start_time | ||
337 | 340 | ||
338 | return stats | 341 | return stats |
339 | 342 | ||
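For context (not part of the patch), a small worked example of the statistics measurement_stats() carries into the report, where the 'minus'/'plus' fields express how far the minimum and maximum fall from the mean; the sample values are made up.

    from statistics import mean

    values = [247.5, 250.5, 252.0]        # hypothetical build times in seconds
    stats = {
        'mean': mean(values),             # 250.0
        'min': min(values),               # 247.5
        'max': max(values),               # 252.0
        'start_time': 1716807000,         # Unix timestamp now recorded alongside the stats
    }
    stats['minus'] = stats['mean'] - stats['min']   # 2.5 (error bar below the mean)
    stats['plus'] = stats['max'] - stats['mean']    # 2.0 (error bar above the mean)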
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py index 62ecdfe390..86aadf39a6 100644 --- a/scripts/lib/checklayer/__init__.py +++ b/scripts/lib/checklayer/__init__.py | |||
@@ -452,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs): | |||
452 | msg.extend([' ' + line for line in output.splitlines()]) | 452 | msg.extend([' ' + line for line in output.splitlines()]) |
453 | msg.append('') | 453 | msg.append('') |
454 | return '\n'.join(msg) | 454 | return '\n'.join(msg) |
455 | |||
456 | |||
457 | def get_git_toplevel(directory): | ||
458 | """ | ||
459 | Try and find the top of the git repository that directory might be in. | ||
460 | Returns the top-level directory, or None. | ||
461 | """ | ||
462 | cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"] | ||
463 | try: | ||
464 | return subprocess.check_output(cmd, text=True).strip() | ||
465 | except: | ||
466 | return None | ||
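A short usage sketch (not part of the patch) of the new get_git_toplevel() helper; the layer path is hypothetical.

    # Returns e.g. "/srv/layers/meta-example-repo" when the layer lives in a git
    # checkout, or None when "git rev-parse --show-toplevel" fails.
    toplevel = get_git_toplevel("/srv/layers/meta-example-repo/meta-example")
    if toplevel:
        print("repository top level:", toplevel)
    else:
        print("layer is not inside a git repository")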
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py index 97b16f78c8..ddead69a7b 100644 --- a/scripts/lib/checklayer/cases/common.py +++ b/scripts/lib/checklayer/cases/common.py | |||
@@ -7,7 +7,7 @@ import glob | |||
7 | import os | 7 | import os |
8 | import unittest | 8 | import unittest |
9 | import re | 9 | import re |
10 | from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures | 10 | from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel |
11 | from checklayer.case import OECheckLayerTestCase | 11 | from checklayer.case import OECheckLayerTestCase |
12 | 12 | ||
13 | class CommonCheckLayer(OECheckLayerTestCase): | 13 | class CommonCheckLayer(OECheckLayerTestCase): |
@@ -40,6 +40,38 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
40 | email_regex = re.compile(r"[^@]+@[^@]+") | 40 | email_regex = re.compile(r"[^@]+@[^@]+") |
41 | self.assertTrue(email_regex.match(data)) | 41 | self.assertTrue(email_regex.match(data)) |
42 | 42 | ||
43 | def find_file_by_name(self, globs): | ||
44 | """ | ||
45 | Utility function to find a file that matches the specified list of | ||
46 | globs, in either the layer directory itself or the repository top-level | ||
47 | directory. | ||
48 | """ | ||
49 | directories = [self.tc.layer["path"]] | ||
50 | toplevel = get_git_toplevel(directories[0]) | ||
51 | if toplevel: | ||
52 | directories.append(toplevel) | ||
53 | |||
54 | for path in directories: | ||
55 | for name in globs: | ||
56 | files = glob.glob(os.path.join(path, name)) | ||
57 | if files: | ||
58 | return sorted(files)[0] | ||
59 | return None | ||
60 | |||
61 | def test_security(self): | ||
62 | """ | ||
63 | Test that the layer has a SECURITY.md (or similar) file, either in the | ||
64 | layer itself or at the top of the containing git repository. | ||
65 | """ | ||
66 | if self.tc.layer["type"] == LayerType.CORE: | ||
67 | raise unittest.SkipTest("Core layer's SECURITY is top level") | ||
68 | |||
69 | filename = self.find_file_by_name(("SECURITY", "SECURITY.*")) | ||
70 | self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.") | ||
71 | |||
72 | size = os.path.getsize(filename) | ||
73 | self.assertGreater(size, 0, msg=f"{filename} has no content.") | ||
74 | |||
43 | def test_parse(self): | 75 | def test_parse(self): |
44 | check_command('Layer %s failed to parse.' % self.tc.layer['name'], | 76 | check_command('Layer %s failed to parse.' % self.tc.layer['name'], |
45 | 'bitbake -p') | 77 | 'bitbake -p') |
@@ -72,7 +104,6 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
72 | self.tc.layer['name']) | 104 | self.tc.layer['name']) |
73 | self.fail('\n'.join(msg)) | 105 | self.fail('\n'.join(msg)) |
74 | 106 | ||
75 | @unittest.expectedFailure | ||
76 | def test_patches_upstream_status(self): | 107 | def test_patches_upstream_status(self): |
77 | import sys | 108 | import sys |
78 | sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/')) | 109 | sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/')) |
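For illustration (not part of the patch), the lookup order the new test_security check relies on, as a standalone sketch: the layer directory is searched first, then the enclosing repository's top level, for a SECURITY or SECURITY.* file. Paths are hypothetical.

    import glob
    import os

    def find_security_file(layer_path, repo_toplevel=None):
        directories = [layer_path]
        if repo_toplevel:
            directories.append(repo_toplevel)
        for path in directories:
            for pattern in ("SECURITY", "SECURITY.*"):
                files = glob.glob(os.path.join(path, pattern))
                if files:
                    return sorted(files)[0]
        return None

    found = find_security_file("/srv/layers/meta-example", "/srv/layers")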
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py index 6133c1c5b4..fa6e1a34fd 100644 --- a/scripts/lib/devtool/__init__.py +++ b/scripts/lib/devtool/__init__.py | |||
@@ -234,7 +234,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None): | |||
234 | f.write(line) | 234 | f.write(line) |
235 | 235 | ||
236 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) | 236 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) |
237 | bb.process.run('git tag -f %s' % basetag, cwd=repodir) | 237 | bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir) |
238 | 238 | ||
239 | # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, | 239 | # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, |
240 | # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe | 240 | # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe |
@@ -256,7 +256,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None): | |||
256 | oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) | 256 | oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) |
257 | found = False | 257 | found = False |
258 | if os.path.exists(os.path.join(repodir, '.gitmodules')): | 258 | if os.path.exists(os.path.join(repodir, '.gitmodules')): |
259 | bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir) | 259 | bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir) |
260 | 260 | ||
261 | def recipe_to_append(recipefile, config, wildcard=False): | 261 | def recipe_to_append(recipefile, config, wildcard=False): |
262 | """ | 262 | """ |
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py index 935ffab46c..0b2c3d33dc 100644 --- a/scripts/lib/devtool/build.py +++ b/scripts/lib/devtool/build.py | |||
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace): | |||
49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) | 49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) |
50 | if not rd: | 50 | if not rd: |
51 | return 1 | 51 | return 1 |
52 | deploytask = 'do_deploy' in rd.getVar('__BBTASKS') | 52 | deploytask = 'do_deploy' in bb.build.listtasks(rd) |
53 | finally: | 53 | finally: |
54 | tinfoil.shutdown() | 54 | tinfoil.shutdown() |
55 | 55 | ||
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py index 1cd4831d2b..990303982c 100644 --- a/scripts/lib/devtool/build_sdk.py +++ b/scripts/lib/devtool/build_sdk.py | |||
@@ -5,14 +5,7 @@ | |||
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | # | 6 | # |
7 | 7 | ||
8 | import os | ||
9 | import subprocess | ||
10 | import logging | 8 | import logging |
11 | import glob | ||
12 | import shutil | ||
13 | import errno | ||
14 | import sys | ||
15 | import tempfile | ||
16 | from devtool import DevtoolError | 9 | from devtool import DevtoolError |
17 | from devtool import build_image | 10 | from devtool import build_image |
18 | 11 | ||
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py index a62b93224e..ee5bb57265 100644 --- a/scripts/lib/devtool/ide_plugins/ide_code.py +++ b/scripts/lib/devtool/ide_plugins/ide_code.py | |||
@@ -161,7 +161,6 @@ class IdeVSCode(IdeBase): | |||
161 | if modified_recipe.build_tool is not BuildTool.CMAKE: | 161 | if modified_recipe.build_tool is not BuildTool.CMAKE: |
162 | return | 162 | return |
163 | recommendations += [ | 163 | recommendations += [ |
164 | "twxs.cmake", | ||
165 | "ms-vscode.cmake-tools", | 164 | "ms-vscode.cmake-tools", |
166 | "ms-vscode.cpptools", | 165 | "ms-vscode.cpptools", |
167 | "ms-vscode.cpptools-extension-pack", | 166 | "ms-vscode.cpptools-extension-pack", |
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py index 7807b322b3..931408fa74 100755 --- a/scripts/lib/devtool/ide_sdk.py +++ b/scripts/lib/devtool/ide_sdk.py | |||
@@ -167,7 +167,7 @@ class RecipeImage: | |||
167 | self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') | 167 | self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') |
168 | 168 | ||
169 | self.gdbserver_missing = 'gdbserver' not in image_d.getVar( | 169 | self.gdbserver_missing = 'gdbserver' not in image_d.getVar( |
170 | 'IMAGE_INSTALL') | 170 | 'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES') |
171 | 171 | ||
172 | @property | 172 | @property |
173 | def debug_support(self): | 173 | def debug_support(self): |
@@ -288,6 +288,7 @@ class RecipeModified: | |||
288 | self.bblayers = None | 288 | self.bblayers = None |
289 | self.bpn = None | 289 | self.bpn = None |
290 | self.d = None | 290 | self.d = None |
291 | self.debug_build = None | ||
291 | self.fakerootcmd = None | 292 | self.fakerootcmd = None |
292 | self.fakerootenv = None | 293 | self.fakerootenv = None |
293 | self.libdir = None | 294 | self.libdir = None |
@@ -333,7 +334,7 @@ class RecipeModified: | |||
333 | self.srctree = workspace[workspacepn]['srctree'] | 334 | self.srctree = workspace[workspacepn]['srctree'] |
334 | # Need to grab this here in case the source is within a subdirectory | 335 | # Need to grab this here in case the source is within a subdirectory |
335 | self.real_srctree = get_real_srctree( | 336 | self.real_srctree = get_real_srctree( |
336 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR')) | 337 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR')) |
337 | self.bbappend = workspace[workspacepn]['bbappend'] | 338 | self.bbappend = workspace[workspacepn]['bbappend'] |
338 | 339 | ||
339 | self.ide_sdk_dir = os.path.join( | 340 | self.ide_sdk_dir = os.path.join( |
@@ -348,6 +349,7 @@ class RecipeModified: | |||
348 | self.bpn = recipe_d.getVar('BPN') | 349 | self.bpn = recipe_d.getVar('BPN') |
349 | self.cxx = recipe_d.getVar('CXX') | 350 | self.cxx = recipe_d.getVar('CXX') |
350 | self.d = recipe_d.getVar('D') | 351 | self.d = recipe_d.getVar('D') |
352 | self.debug_build = recipe_d.getVar('DEBUG_BUILD') | ||
351 | self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') | 353 | self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') |
352 | self.fakerootenv = recipe_d.getVar('FAKEROOTENV') | 354 | self.fakerootenv = recipe_d.getVar('FAKEROOTENV') |
353 | self.libdir = recipe_d.getVar('libdir') | 355 | self.libdir = recipe_d.getVar('libdir') |
@@ -389,17 +391,6 @@ class RecipeModified: | |||
389 | self.recipe_id = self.bpn + "-" + self.package_arch | 391 | self.recipe_id = self.bpn + "-" + self.package_arch |
390 | self.recipe_id_pretty = self.bpn + ": " + self.package_arch | 392 | self.recipe_id_pretty = self.bpn + ": " + self.package_arch |
391 | 393 | ||
392 | def append_to_bbappend(self, append_text): | ||
393 | with open(self.bbappend, 'a') as bbap: | ||
394 | bbap.write(append_text) | ||
395 | |||
396 | def remove_from_bbappend(self, append_text): | ||
397 | with open(self.bbappend, 'r') as bbap: | ||
398 | text = bbap.read() | ||
399 | new_text = text.replace(append_text, '') | ||
400 | with open(self.bbappend, 'w') as bbap: | ||
401 | bbap.write(new_text) | ||
402 | |||
403 | @staticmethod | 394 | @staticmethod |
404 | def is_valid_shell_variable(var): | 395 | def is_valid_shell_variable(var): |
405 | """Skip strange shell variables like systemd | 396 | """Skip strange shell variables like systemd |
@@ -412,34 +403,6 @@ class RecipeModified: | |||
412 | return True | 403 | return True |
413 | return False | 404 | return False |
414 | 405 | ||
415 | def debug_build_config(self, args): | ||
416 | """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise""" | ||
417 | if self.build_tool is BuildTool.CMAKE: | ||
418 | append_text = os.linesep + \ | ||
419 | 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep | ||
420 | if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: | ||
421 | self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = { | ||
422 | "type": "STRING", | ||
423 | "value": "Debug", | ||
424 | } | ||
425 | self.append_to_bbappend(append_text) | ||
426 | elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: | ||
427 | del self.cmake_cache_vars['CMAKE_BUILD_TYPE'] | ||
428 | self.remove_from_bbappend(append_text) | ||
429 | elif self.build_tool is BuildTool.MESON: | ||
430 | append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep | ||
431 | if args.debug_build_config and self.meson_buildtype != "debug": | ||
432 | self.mesonopts.replace( | ||
433 | '--buildtype ' + self.meson_buildtype, '--buildtype debug') | ||
434 | self.append_to_bbappend(append_text) | ||
435 | elif self.meson_buildtype == "debug": | ||
436 | self.mesonopts.replace( | ||
437 | '--buildtype debug', '--buildtype plain') | ||
438 | self.remove_from_bbappend(append_text) | ||
439 | elif args.debug_build_config: | ||
440 | logger.warn( | ||
441 | "--debug-build-config is not implemented for this build tool yet.") | ||
442 | |||
443 | def solib_search_path(self, image): | 406 | def solib_search_path(self, image): |
444 | """Search for debug symbols in the rootfs and rootfs-dbg | 407 | """Search for debug symbols in the rootfs and rootfs-dbg |
445 | 408 | ||
@@ -493,7 +456,7 @@ class RecipeModified: | |||
493 | 456 | ||
494 | vars = (key for key in d.keys() if not key.startswith( | 457 | vars = (key for key in d.keys() if not key.startswith( |
495 | "__") and not d.getVarFlag(key, "func", False)) | 458 | "__") and not d.getVarFlag(key, "func", False)) |
496 | for var in vars: | 459 | for var in sorted(vars): |
497 | func = d.getVarFlag(var, "func", False) | 460 | func = d.getVarFlag(var, "func", False) |
498 | if d.getVarFlag(var, 'python', False) and func: | 461 | if d.getVarFlag(var, 'python', False) and func: |
499 | continue | 462 | continue |
@@ -545,7 +508,7 @@ class RecipeModified: | |||
545 | cache_vars = {} | 508 | cache_vars = {} |
546 | oecmake_args = d.getVar('OECMAKE_ARGS').split() | 509 | oecmake_args = d.getVar('OECMAKE_ARGS').split() |
547 | extra_oecmake = d.getVar('EXTRA_OECMAKE').split() | 510 | extra_oecmake = d.getVar('EXTRA_OECMAKE').split() |
548 | for param in oecmake_args + extra_oecmake: | 511 | for param in sorted(oecmake_args + extra_oecmake): |
549 | d_pref = "-D" | 512 | d_pref = "-D" |
550 | if param.startswith(d_pref): | 513 | if param.startswith(d_pref): |
551 | param = param[len(d_pref):] | 514 | param = param[len(d_pref):] |
@@ -712,42 +675,6 @@ class RecipeModified: | |||
712 | binaries.append(abs_name[d_len:]) | 675 | binaries.append(abs_name[d_len:]) |
713 | return sorted(binaries) | 676 | return sorted(binaries) |
714 | 677 | ||
715 | def gen_delete_package_dirs(self): | ||
716 | """delete folders of package tasks | ||
717 | |||
718 | This is a workaround for and issue with recipes having their sources | ||
719 | downloaded as file:// | ||
720 | This likely breaks pseudo like: | ||
721 | path mismatch [3 links]: ino 79147802 db | ||
722 | .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/ | ||
723 | cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp | ||
724 | .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp | ||
725 | Since the files are outdated anyway, let's delete them (also from pseudo's db) to work around this issue. | ||
726 | """ | ||
727 | cmd_lines = ['#!/bin/sh'] | ||
728 | |||
729 | # Set up the appropriate environment | ||
730 | newenv = dict(os.environ) | ||
731 | for varvalue in self.fakerootenv.split(): | ||
732 | if '=' in varvalue: | ||
733 | splitval = varvalue.split('=', 1) | ||
734 | newenv[splitval[0]] = splitval[1] | ||
735 | |||
736 | # Replicate the environment variables from bitbake | ||
737 | for var, val in newenv.items(): | ||
738 | if not RecipeModified.is_valid_shell_variable(var): | ||
739 | continue | ||
740 | cmd_lines.append('%s="%s"' % (var, val)) | ||
741 | cmd_lines.append('export %s' % var) | ||
742 | |||
743 | # Delete the folders | ||
744 | pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [ | ||
745 | "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]]) | ||
746 | cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs) | ||
747 | cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd)) | ||
748 | |||
749 | return self.write_script(cmd_lines, 'delete_package_dirs') | ||
750 | |||
751 | def gen_deploy_target_script(self, args): | 678 | def gen_deploy_target_script(self, args): |
752 | """Generate a script which does what devtool deploy-target does | 679 | """Generate a script which does what devtool deploy-target does |
753 | 680 | ||
@@ -785,8 +712,6 @@ class RecipeModified: | |||
785 | """Generate a script which does install and deploy""" | 712 | """Generate a script which does install and deploy""" |
786 | cmd_lines = ['#!/bin/bash'] | 713 | cmd_lines = ['#!/bin/bash'] |
787 | 714 | ||
788 | cmd_lines.append(self.gen_delete_package_dirs()) | ||
789 | |||
790 | # . oe-init-build-env $BUILDDIR | 715 | # . oe-init-build-env $BUILDDIR |
791 | # Note: Sourcing scripts with arguments requires bash | 716 | # Note: Sourcing scripts with arguments requires bash |
792 | cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( | 717 | cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( |
@@ -988,6 +913,13 @@ def ide_setup(args, config, basepath, workspace): | |||
988 | recipe_modified.gen_meson_wrapper() | 913 | recipe_modified.gen_meson_wrapper() |
989 | ide.setup_modified_recipe( | 914 | ide.setup_modified_recipe( |
990 | args, recipe_image, recipe_modified) | 915 | args, recipe_image, recipe_modified) |
916 | |||
917 | if recipe_modified.debug_build != '1': | ||
918 | logger.warn( | ||
919 | 'Recipe %s is compiled with release build configuration. ' | ||
920 | 'You might want to add DEBUG_BUILD = "1" to %s. ' | ||
921 | 'Note that devtool modify --debug-build can do this automatically.', | ||
922 | recipe_modified.name, recipe_modified.bbappend) | ||
991 | else: | 923 | else: |
992 | raise DevtoolError("Must not end up here.") | 924 | raise DevtoolError("Must not end up here.") |
993 | 925 | ||
@@ -995,6 +927,15 @@ def ide_setup(args, config, basepath, workspace): | |||
995 | def register_commands(subparsers, context): | 927 | def register_commands(subparsers, context): |
996 | """Register devtool subcommands from this plugin""" | 928 | """Register devtool subcommands from this plugin""" |
997 | 929 | ||
930 | # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE | ||
931 | # configuration is generated. In the case of the eSDK, the bootstrapping is performed | ||
932 | # during the installation of the eSDK installer. Running the ide-sdk plugin from an | ||
933 | # eSDK installer-based setup would require skipping the bootstrapping and probably | ||
934 | # taking some other differences into account when generating the IDE configurations. | ||
935 | # This would be possible. But it is not implemented. | ||
936 | if context.fixed_setup: | ||
937 | return | ||
938 | |||
998 | global ide_plugins | 939 | global ide_plugins |
999 | 940 | ||
1000 | # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. | 941 | # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. |
@@ -1015,7 +956,7 @@ def register_commands(subparsers, context): | |||
1015 | help='Setup the SDK and configure the IDE') | 956 | help='Setup the SDK and configure the IDE') |
1016 | parser_ide_sdk.add_argument( | 957 | parser_ide_sdk.add_argument( |
1017 | 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n' | 958 | 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n' |
1018 | 'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.') | 959 | 'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.') |
1019 | parser_ide_sdk.add_argument( | 960 | parser_ide_sdk.add_argument( |
1020 | '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, | 961 | '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, |
1021 | help='Different SDK types are supported:\n' | 962 | help='Different SDK types are supported:\n' |
@@ -1052,7 +993,7 @@ def register_commands(subparsers, context): | |||
1052 | parser_ide_sdk.add_argument( | 993 | parser_ide_sdk.add_argument( |
1053 | '-I', '--key', help='Specify ssh private key for connection to the target') | 994 | '-I', '--key', help='Specify ssh private key for connection to the target') |
1054 | parser_ide_sdk.add_argument( | 995 | parser_ide_sdk.add_argument( |
1055 | '--skip-bitbake', help='Generate IDE configuration but skip calling bibtake to update the SDK.', action='store_true') | 996 | '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true') |
1056 | parser_ide_sdk.add_argument( | 997 | parser_ide_sdk.add_argument( |
1057 | '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true') | 998 | '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true') |
1058 | parser_ide_sdk.add_argument( | 999 | parser_ide_sdk.add_argument( |
@@ -1065,6 +1006,4 @@ def register_commands(subparsers, context): | |||
1065 | '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') | 1006 | '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') |
1066 | parser_ide_sdk.add_argument( | 1007 | parser_ide_sdk.add_argument( |
1067 | '--no-check-space', help='Do not check for available space before deploying', action='store_true') | 1008 | '--no-check-space', help='Do not check for available space before deploying', action='store_true') |
1068 | parser_ide_sdk.add_argument( | ||
1069 | '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true') | ||
1070 | parser_ide_sdk.set_defaults(func=ide_setup) | 1009 | parser_ide_sdk.set_defaults(func=ide_setup) |
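The hunks above drop the CMake/Meson-specific --debug-build-config handling from ide-sdk and instead emit a warning when DEBUG_BUILD is not enabled, pointing users at the new devtool modify --debug-build option (which, per the standard.py changes further down, writes DEBUG_BUILD = "1" into the workspace bbappend). A minimal sketch of what that replacement boils down to, using a hypothetical bbappend path; this is an illustration, not the devtool implementation:

import os

def ensure_debug_build(bbappend_path):
    """Append DEBUG_BUILD = "1" to a bbappend unless it is already there."""
    line = 'DEBUG_BUILD = "1"\n'
    if os.path.exists(bbappend_path):
        with open(bbappend_path) as f:
            if line in f:
                return False
    with open(bbappend_path, 'a') as f:
        f.write('\n' + line)
    return True

# Hypothetical workspace path, for illustration only:
# ensure_debug_build('build/workspace/appends/cmake-example_1.0.bbappend')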
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 18daef30c3..1054960551 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py | |||
@@ -23,9 +23,6 @@ | |||
23 | import os | 23 | import os |
24 | import bb | 24 | import bb |
25 | import logging | 25 | import logging |
26 | import argparse | ||
27 | import re | ||
28 | import glob | ||
29 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command | 26 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command |
30 | from devtool import check_workspace_recipe | 27 | from devtool import check_workspace_recipe |
31 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
@@ -34,7 +31,6 @@ def menuconfig(args, config, basepath, workspace): | |||
34 | """Entry point for the devtool 'menuconfig' subcommand""" | 31 | """Entry point for the devtool 'menuconfig' subcommand""" |
35 | 32 | ||
36 | rd = "" | 33 | rd = "" |
37 | kconfigpath = "" | ||
38 | pn_src = "" | 34 | pn_src = "" |
39 | localfilesdir = "" | 35 | localfilesdir = "" |
40 | workspace_dir = "" | 36 | workspace_dir = "" |
@@ -51,7 +47,6 @@ def menuconfig(args, config, basepath, workspace): | |||
51 | raise DevtoolError("This recipe does not support menuconfig option") | 47 | raise DevtoolError("This recipe does not support menuconfig option") |
52 | 48 | ||
53 | workspace_dir = os.path.join(config.workspace_path,'sources') | 49 | workspace_dir = os.path.join(config.workspace_path,'sources') |
54 | kconfigpath = rd.getVar('B') | ||
55 | pn_src = os.path.join(workspace_dir,pn) | 50 | pn_src = os.path.join(workspace_dir,pn) |
56 | 51 | ||
57 | # add check to see if oe_local_files exists or not | 52 | # add check to see if oe_local_files exists or not |
@@ -70,7 +65,7 @@ def menuconfig(args, config, basepath, workspace): | |||
70 | logger.info('Launching menuconfig') | 65 | logger.info('Launching menuconfig') |
71 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) | 66 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) |
72 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') | 67 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') |
73 | res = standard._create_kconfig_diff(pn_src,rd,fragment) | 68 | standard._create_kconfig_diff(pn_src,rd,fragment) |
74 | 69 | ||
75 | return 0 | 70 | return 0 |
76 | 71 | ||
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 6674e67267..1fd5947c41 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -18,11 +18,13 @@ import argparse_oe | |||
18 | import scriptutils | 18 | import scriptutils |
19 | import errno | 19 | import errno |
20 | import glob | 20 | import glob |
21 | import filecmp | ||
22 | from collections import OrderedDict | 21 | from collections import OrderedDict |
22 | |||
23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError | 23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError |
24 | from devtool import parse_recipe | 24 | from devtool import parse_recipe |
25 | 25 | ||
26 | import bb.utils | ||
27 | |||
26 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
27 | 29 | ||
28 | override_branch_prefix = 'devtool-override-' | 30 | override_branch_prefix = 'devtool-override-' |
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-' | |||
30 | 32 | ||
31 | def add(args, config, basepath, workspace): | 33 | def add(args, config, basepath, workspace): |
32 | """Entry point for the devtool 'add' subcommand""" | 34 | """Entry point for the devtool 'add' subcommand""" |
33 | import bb | 35 | import bb.data |
36 | import bb.process | ||
34 | import oe.recipeutils | 37 | import oe.recipeutils |
35 | 38 | ||
36 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: | 39 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: |
@@ -206,7 +209,7 @@ def add(args, config, basepath, workspace): | |||
206 | for fn in os.listdir(tempdir): | 209 | for fn in os.listdir(tempdir): |
207 | shutil.move(os.path.join(tempdir, fn), recipedir) | 210 | shutil.move(os.path.join(tempdir, fn), recipedir) |
208 | else: | 211 | else: |
209 | raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) | 212 | raise DevtoolError(f'Failed to create a recipe file for source {source}') |
210 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) | 213 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) |
211 | if os.path.exists(attic_recipe): | 214 | if os.path.exists(attic_recipe): |
212 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) | 215 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) |
@@ -305,6 +308,7 @@ def add(args, config, basepath, workspace): | |||
305 | 308 | ||
306 | def _check_compatible_recipe(pn, d): | 309 | def _check_compatible_recipe(pn, d): |
307 | """Check if the recipe is supported by devtool""" | 310 | """Check if the recipe is supported by devtool""" |
311 | import bb.data | ||
308 | if pn == 'perf': | 312 | if pn == 'perf': |
309 | raise DevtoolError("The perf recipe does not actually check out " | 313 | raise DevtoolError("The perf recipe does not actually check out " |
310 | "source and thus cannot be supported by this tool", | 314 | "source and thus cannot be supported by this tool", |
@@ -374,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
374 | 378 | ||
375 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | 379 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): |
376 | """List contents of a git treeish""" | 380 | """List contents of a git treeish""" |
377 | import bb | 381 | import bb.process |
378 | cmd = ['git', 'ls-tree', '-z', treeish] | 382 | cmd = ['git', 'ls-tree', '-z', treeish] |
379 | if recursive: | 383 | if recursive: |
380 | cmd.append('-r') | 384 | cmd.append('-r') |
@@ -387,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | |||
387 | ret[split[3]] = split[0:3] | 391 | ret[split[3]] = split[0:3] |
388 | return ret | 392 | return ret |
389 | 393 | ||
394 | def _git_modified(repodir): | ||
395 | """List the difference between HEAD and the index""" | ||
396 | import bb.process | ||
397 | cmd = ['git', 'status', '--porcelain'] | ||
398 | out, _ = bb.process.run(cmd, cwd=repodir) | ||
399 | ret = [] | ||
400 | if out: | ||
401 | for line in out.split("\n"): | ||
402 | if line and not line.startswith('??'): | ||
403 | ret.append(line[3:]) | ||
404 | return ret | ||
405 | |||
406 | |||
390 | def _git_exclude_path(srctree, path): | 407 | def _git_exclude_path(srctree, path): |
391 | """Return pathspec (list of paths) that excludes certain path""" | 408 | """Return pathspec (list of paths) that excludes certain path""" |
392 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - | 409 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - |
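The new _git_modified() helper above replaces the old oe-local-files bookkeeping with a plain listing of changed paths taken from git status --porcelain. Each porcelain line is two status characters, a space, then the path, so line[3:] yields the path and untracked entries ("??") are skipped. A standalone sketch of that parsing, with made-up sample output:

# Sketch of the parsing used by _git_modified() above; the sample output is
# illustrative, not captured from a real repository.
sample = (
    " M src/main.c\n"
    "A  defconfig\n"
    "?? build/\n"        # untracked entries are ignored
)

def parse_porcelain(out):
    modified = []
    for line in out.split("\n"):
        # porcelain format: two status characters, a space, then the path
        if line and not line.startswith("??"):
            modified.append(line[3:])
    return modified

print(parse_porcelain(sample))   # ['src/main.c', 'defconfig']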
@@ -414,8 +431,6 @@ def _ls_tree(directory): | |||
414 | 431 | ||
415 | def extract(args, config, basepath, workspace): | 432 | def extract(args, config, basepath, workspace): |
416 | """Entry point for the devtool 'extract' subcommand""" | 433 | """Entry point for the devtool 'extract' subcommand""" |
417 | import bb | ||
418 | |||
419 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 434 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
420 | if not tinfoil: | 435 | if not tinfoil: |
421 | # Error already shown | 436 | # Error already shown |
@@ -438,8 +453,6 @@ def extract(args, config, basepath, workspace): | |||
438 | 453 | ||
439 | def sync(args, config, basepath, workspace): | 454 | def sync(args, config, basepath, workspace): |
440 | """Entry point for the devtool 'sync' subcommand""" | 455 | """Entry point for the devtool 'sync' subcommand""" |
441 | import bb | ||
442 | |||
443 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 456 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
444 | if not tinfoil: | 457 | if not tinfoil: |
445 | # Error already shown | 458 | # Error already shown |
@@ -460,37 +473,11 @@ def sync(args, config, basepath, workspace): | |||
460 | finally: | 473 | finally: |
461 | tinfoil.shutdown() | 474 | tinfoil.shutdown() |
462 | 475 | ||
463 | def symlink_oelocal_files_srctree(rd, srctree): | ||
464 | import oe.patch | ||
465 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | ||
466 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | ||
467 | # (otherwise the recipe won't build as expected) | ||
468 | local_files_dir = os.path.join(srctree, 'oe-local-files') | ||
469 | addfiles = [] | ||
470 | for root, _, files in os.walk(local_files_dir): | ||
471 | relpth = os.path.relpath(root, local_files_dir) | ||
472 | if relpth != '.': | ||
473 | bb.utils.mkdirhier(os.path.join(srctree, relpth)) | ||
474 | for fn in files: | ||
475 | if fn == '.gitignore': | ||
476 | continue | ||
477 | destpth = os.path.join(srctree, relpth, fn) | ||
478 | if os.path.exists(destpth): | ||
479 | os.unlink(destpth) | ||
480 | if relpth != '.': | ||
481 | back_relpth = os.path.relpath(local_files_dir, root) | ||
482 | os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) | ||
483 | else: | ||
484 | os.symlink('oe-local-files/%s' % fn, destpth) | ||
485 | addfiles.append(os.path.join(relpth, fn)) | ||
486 | if addfiles: | ||
487 | oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd) | ||
488 | |||
489 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 476 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
490 | """Extract sources of a recipe""" | 477 | """Extract sources of a recipe""" |
491 | import oe.recipeutils | ||
492 | import oe.patch | ||
493 | import oe.path | 478 | import oe.path |
479 | import bb.data | ||
480 | import bb.process | ||
494 | 481 | ||
495 | pn = d.getVar('PN') | 482 | pn = d.getVar('PN') |
496 | 483 | ||
@@ -555,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
555 | tempbasedir = d.getVar('WORKDIR') | 542 | tempbasedir = d.getVar('WORKDIR') |
556 | bb.utils.mkdirhier(tempbasedir) | 543 | bb.utils.mkdirhier(tempbasedir) |
557 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) | 544 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) |
545 | appendbackup = None | ||
558 | try: | 546 | try: |
559 | tinfoil.logger.setLevel(logging.WARNING) | 547 | tinfoil.logger.setLevel(logging.WARNING) |
560 | 548 | ||
@@ -565,7 +553,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
565 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') | 553 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') |
566 | shutil.copyfile(appendfile, appendbackup) | 554 | shutil.copyfile(appendfile, appendbackup) |
567 | else: | 555 | else: |
568 | appendbackup = None | ||
569 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 556 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
570 | logger.debug('writing append file %s' % appendfile) | 557 | logger.debug('writing append file %s' % appendfile) |
571 | with open(appendfile, 'a') as f: | 558 | with open(appendfile, 'a') as f: |
@@ -638,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
638 | srcsubdir = f.read() | 625 | srcsubdir = f.read() |
639 | except FileNotFoundError as e: | 626 | except FileNotFoundError as e: |
640 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) | 627 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) |
641 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) | 628 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR')))) |
642 | 629 | ||
643 | # Check if work-shared is empty, if yes | 630 | # Check if work-shared is empty, if yes |
644 | # find source and copy to work-shared | 631 | # find source and copy to work-shared |
@@ -657,35 +644,22 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
657 | elif not os.path.exists(workshareddir): | 644 | elif not os.path.exists(workshareddir): |
658 | oe.path.copyhardlinktree(srcsubdir, workshareddir) | 645 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
659 | 646 | ||
660 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | ||
661 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | ||
662 | |||
663 | if sync: | 647 | if sync: |
664 | bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | 648 | try: |
665 | 649 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | |
666 | # Move the oe-local-files directory to srctree. | 650 | bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree) |
667 | # As oe-local-files is not part of the constructed git tree, | 651 | |
668 | # removing it directly during the synchronization might surprise | 652 | # Use git fetch to update the source with the current recipe |
669 | # the user. Instead, we move it to oe-local-files.bak and remind | 653 | # To be able to update the currently checked out branch with |
670 | # the user in the log message. | 654 | # possibly new history (no fast-forward) git needs to be told |
671 | if os.path.exists(srctree_localdir + '.bak'): | 655 | # that's ok |
672 | shutil.rmtree(srctree_localdir + '.bak') | 656 | logger.info('Syncing source files including patches to git branch: %s' % devbranch) |
673 | 657 | bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | |
674 | if os.path.exists(srctree_localdir): | 658 | except bb.process.ExecutionError as e: |
675 | logger.info('Backing up current local file directory %s' % srctree_localdir) | 659 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) |
676 | shutil.move(srctree_localdir, srctree_localdir + '.bak') | ||
677 | |||
678 | if os.path.exists(tempdir_localdir): | ||
679 | logger.info('Syncing local source files to srctree...') | ||
680 | shutil.copytree(tempdir_localdir, srctree_localdir) | ||
681 | else: | ||
682 | # Move oe-local-files directory to srctree | ||
683 | if os.path.exists(tempdir_localdir): | ||
684 | logger.info('Adding local source files to srctree...') | ||
685 | shutil.move(tempdir_localdir, srcsubdir) | ||
686 | 660 | ||
661 | else: | ||
687 | shutil.move(srcsubdir, srctree) | 662 | shutil.move(srcsubdir, srctree) |
688 | symlink_oelocal_files_srctree(d, srctree) | ||
689 | 663 | ||
690 | if is_kernel_yocto: | 664 | if is_kernel_yocto: |
691 | logger.info('Copying kernel config to srctree') | 665 | logger.info('Copying kernel config to srctree') |
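The reworked sync path above first backs up the current devtool branch and then force-fetches the freshly extracted source over the checked-out branch; --update-head-ok --force is what lets git rewrite the currently checked-out branch when the new history is not a fast-forward. A rough, hedged equivalent using subprocess instead of bb.process (paths and branch name are placeholders):

import subprocess

def sync_branch(srcsubdir, srctree, devbranch):
    # Keep a backup of the current branch before rewriting its history
    subprocess.run(['git', 'branch', '-f', devbranch + '.bak'],
                   cwd=srctree, check=True)
    # --update-head-ok --force lets the checked-out branch be updated even
    # when the fetched history is not a fast-forward
    subprocess.run(['git', 'fetch', '--update-head-ok', '--force',
                    'file://' + srcsubdir, '%s:%s' % (devbranch, devbranch)],
                   cwd=srctree, check=True)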
@@ -704,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
704 | 678 | ||
705 | def _add_md5(config, recipename, filename): | 679 | def _add_md5(config, recipename, filename): |
706 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" | 680 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" |
707 | import bb.utils | ||
708 | |||
709 | def addfile(fn): | 681 | def addfile(fn): |
710 | md5 = bb.utils.md5_file(fn) | 682 | md5 = bb.utils.md5_file(fn) |
711 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: | 683 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: |
@@ -724,7 +696,6 @@ def _add_md5(config, recipename, filename): | |||
724 | def _check_preserve(config, recipename): | 696 | def _check_preserve(config, recipename): |
725 | """Check if a file was manually changed and needs to be saved in 'attic' | 697 | """Check if a file was manually changed and needs to be saved in 'attic' |
726 | directory""" | 698 | directory""" |
727 | import bb.utils | ||
728 | origfile = os.path.join(config.workspace_path, '.devtool_md5') | 699 | origfile = os.path.join(config.workspace_path, '.devtool_md5') |
729 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') | 700 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') |
730 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) | 701 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) |
@@ -755,36 +726,36 @@ def _check_preserve(config, recipename): | |||
755 | 726 | ||
756 | def get_staging_kver(srcdir): | 727 | def get_staging_kver(srcdir): |
757 | # Kernel version from work-shared | 728 | # Kernel version from work-shared |
758 | kerver = [] | 729 | import itertools |
759 | staging_kerVer="" | 730 | try: |
760 | if os.path.exists(srcdir) and os.listdir(srcdir): | ||
761 | with open(os.path.join(srcdir, "Makefile")) as f: | 731 | with open(os.path.join(srcdir, "Makefile")) as f: |
762 | version = [next(f) for x in range(5)][1:4] | 732 | # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 1, 2, 3 |
763 | for word in version: | 733 | return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4)) |
764 | kerver.append(word.split('= ')[1].split('\n')[0]) | 734 | except FileNotFoundError: |
765 | staging_kerVer = ".".join(kerver) | 735 | return "" |
766 | return staging_kerVer | ||
767 | 736 | ||
768 | def get_staging_kbranch(srcdir): | 737 | def get_staging_kbranch(srcdir): |
738 | import bb.process | ||
769 | staging_kbranch = "" | 739 | staging_kbranch = "" |
770 | if os.path.exists(srcdir) and os.listdir(srcdir): | 740 | if os.path.exists(srcdir) and os.listdir(srcdir): |
771 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) | 741 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) |
772 | staging_kbranch = "".join(branch.split('\n')[0]) | 742 | staging_kbranch = "".join(branch.split('\n')[0]) |
773 | return staging_kbranch | 743 | return staging_kbranch |
774 | 744 | ||
775 | def get_real_srctree(srctree, s, workdir): | 745 | def get_real_srctree(srctree, s, unpackdir): |
776 | # Check that recipe isn't using a shared workdir | 746 | # Check that recipe isn't using a shared workdir |
777 | s = os.path.abspath(s) | 747 | s = os.path.abspath(s) |
778 | workdir = os.path.abspath(workdir) | 748 | unpackdir = os.path.abspath(unpackdir) |
779 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | 749 | if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir: |
780 | # Handle if S is set to a subdirectory of the source | 750 | # Handle if S is set to a subdirectory of the source |
781 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | 751 | srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1] |
782 | srctree = os.path.join(srctree, srcsubdir) | 752 | srctree = os.path.join(srctree, srcsubdir) |
783 | return srctree | 753 | return srctree |
784 | 754 | ||
785 | def modify(args, config, basepath, workspace): | 755 | def modify(args, config, basepath, workspace): |
786 | """Entry point for the devtool 'modify' subcommand""" | 756 | """Entry point for the devtool 'modify' subcommand""" |
787 | import bb | 757 | import bb.data |
758 | import bb.process | ||
788 | import oe.recipeutils | 759 | import oe.recipeutils |
789 | import oe.patch | 760 | import oe.patch |
790 | import oe.path | 761 | import oe.path |
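Two helpers in the hunk above changed behaviour: get_staging_kver() now builds the kernel version from the VERSION, PATCHLEVEL and SUBLEVEL lines of the work-shared Makefile (lines 1-3 counting from zero) and returns an empty string if the Makefile is missing, and get_real_srctree() compares S against UNPACKDIR rather than WORKDIR. A worked example of the Makefile parsing, with made-up content:

# Worked example of the parsing in the new get_staging_kver(); the Makefile
# header below is made up for illustration.
import io, itertools

makefile = io.StringIO(
    "# SPDX-License-Identifier: GPL-2.0\n"
    "VERSION = 6\n"
    "PATCHLEVEL = 6\n"
    "SUBLEVEL = 21\n"
    "EXTRAVERSION =\n"
)
# Lines 1..3 (0-based) carry VERSION, PATCHLEVEL and SUBLEVEL
kver = ".".join(line.rstrip().split('= ')[1]
                for line in itertools.islice(makefile, 1, 4))
print(kver)   # 6.6.21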
@@ -840,35 +811,21 @@ def modify(args, config, basepath, workspace): | |||
840 | staging_kbranch = get_staging_kbranch(srcdir) | 811 | staging_kbranch = get_staging_kbranch(srcdir) |
841 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 812 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
842 | oe.path.copyhardlinktree(srcdir, srctree) | 813 | oe.path.copyhardlinktree(srcdir, srctree) |
843 | workdir = rd.getVar('WORKDIR') | 814 | unpackdir = rd.getVar('UNPACKDIR') |
844 | srcsubdir = rd.getVar('S') | 815 | srcsubdir = rd.getVar('S') |
845 | localfilesdir = os.path.join(srctree, 'oe-local-files') | ||
846 | # Move local source files into separate subdir | ||
847 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
848 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
849 | 816 | ||
850 | for key in local_files.copy(): | 817 | # Add locally copied files to gitignore as we add back to the metadata directly |
851 | if key.endswith('scc'): | 818 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
852 | sccfile = open(local_files[key], 'r') | ||
853 | for l in sccfile: | ||
854 | line = l.split() | ||
855 | if line and line[0] in ('kconf', 'patch'): | ||
856 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
857 | if not cfg in local_files.values(): | ||
858 | local_files[line[-1]] = cfg | ||
859 | shutil.copy2(cfg, workdir) | ||
860 | sccfile.close() | ||
861 | |||
862 | # Ignore local files with subdir={BP} | ||
863 | srcabspath = os.path.abspath(srcsubdir) | 819 | srcabspath = os.path.abspath(srcsubdir) |
864 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 820 | local_files = [fname for fname in local_files if |
821 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
822 | srcabspath == unpackdir] | ||
865 | if local_files: | 823 | if local_files: |
866 | for fname in local_files: | 824 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
867 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 825 | f.write('# Ignore local files, by default. Remove following lines' |
868 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 826 | 'if you want to commit the directory to Git\n') |
869 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') | 827 | for fname in local_files: |
870 | 828 | f.write('%s\n' % fname) | |
871 | symlink_oelocal_files_srctree(rd, srctree) | ||
872 | 829 | ||
873 | task = 'do_configure' | 830 | task = 'do_configure' |
874 | res = tinfoil.build_targets(pn, task, handle_events=True) | 831 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -893,7 +850,10 @@ def modify(args, config, basepath, workspace): | |||
893 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) | 850 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
894 | commits["."] = stdout.split() | 851 | commits["."] = stdout.split() |
895 | check_commits = True | 852 | check_commits = True |
896 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | 853 | try: |
854 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
855 | except bb.process.ExecutionError: | ||
856 | stdout = "" | ||
897 | for line in stdout.splitlines(): | 857 | for line in stdout.splitlines(): |
898 | (rev, submodule_path) = line.split() | 858 | (rev, submodule_path) = line.split() |
899 | submodule = os.path.relpath(submodule_path, srctree) | 859 | submodule = os.path.relpath(submodule_path, srctree) |
@@ -947,7 +907,7 @@ def modify(args, config, basepath, workspace): | |||
947 | 907 | ||
948 | # Need to grab this here in case the source is within a subdirectory | 908 | # Need to grab this here in case the source is within a subdirectory |
949 | srctreebase = srctree | 909 | srctreebase = srctree |
950 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
951 | 911 | ||
952 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
953 | with open(appendfile, 'w') as f: | 913 | with open(appendfile, 'w') as f: |
@@ -987,13 +947,6 @@ def modify(args, config, basepath, workspace): | |||
987 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) | 947 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
988 | 948 | ||
989 | if bb.data.inherits_class('kernel', rd): | 949 | if bb.data.inherits_class('kernel', rd): |
990 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | ||
991 | 'do_fetch do_unpack do_kernel_configcheck"\n') | ||
992 | f.write('\ndo_patch[noexec] = "1"\n') | ||
993 | f.write('\ndo_configure:append() {\n' | ||
994 | ' cp ${B}/.config ${S}/.config.baseline\n' | ||
995 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | ||
996 | '}\n') | ||
997 | f.write('\ndo_kernel_configme:prepend() {\n' | 950 | f.write('\ndo_kernel_configme:prepend() {\n' |
998 | ' if [ -e ${S}/.config ]; then\n' | 951 | ' if [ -e ${S}/.config ]; then\n' |
999 | ' mv ${S}/.config ${S}/.config.old\n' | 952 | ' mv ${S}/.config ${S}/.config.old\n' |
@@ -1017,6 +970,8 @@ def modify(args, config, basepath, workspace): | |||
1017 | if branch == args.branch: | 970 | if branch == args.branch: |
1018 | continue | 971 | continue |
1019 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) | 972 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) |
973 | if args.debug_build: | ||
974 | f.write('\nDEBUG_BUILD = "1"\n') | ||
1020 | 975 | ||
1021 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 976 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
1022 | 977 | ||
@@ -1061,6 +1016,7 @@ def rename(args, config, basepath, workspace): | |||
1061 | origfnver = '' | 1016 | origfnver = '' |
1062 | 1017 | ||
1063 | recipefilemd5 = None | 1018 | recipefilemd5 = None |
1019 | newrecipefilemd5 = None | ||
1064 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 1020 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
1065 | try: | 1021 | try: |
1066 | rd = parse_recipe(config, tinfoil, args.recipename, True) | 1022 | rd = parse_recipe(config, tinfoil, args.recipename, True) |
@@ -1138,6 +1094,7 @@ def rename(args, config, basepath, workspace): | |||
1138 | 1094 | ||
1139 | # Rename source tree if it's the default path | 1095 | # Rename source tree if it's the default path |
1140 | appendmd5 = None | 1096 | appendmd5 = None |
1097 | newappendmd5 = None | ||
1141 | if not args.no_srctree: | 1098 | if not args.no_srctree: |
1142 | srctree = workspace[args.recipename]['srctree'] | 1099 | srctree = workspace[args.recipename]['srctree'] |
1143 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): | 1100 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): |
@@ -1226,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1226 | """Get initial and update rev of a recipe. These are the start point of the | 1183 | """Get initial and update rev of a recipe. These are the start point of the |
1227 | whole patchset and start point for the patches to be re-generated/updated. | 1184 | whole patchset and start point for the patches to be re-generated/updated. |
1228 | """ | 1185 | """ |
1229 | import bb | 1186 | import bb.process |
1230 | 1187 | ||
1231 | # Get current branch | 1188 | # Get current branch |
1232 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | 1189 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
@@ -1352,6 +1309,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
1352 | """ | 1309 | """ |
1353 | import oe.recipeutils | 1310 | import oe.recipeutils |
1354 | from oe.patch import GitApplyTree | 1311 | from oe.patch import GitApplyTree |
1312 | import bb.process | ||
1355 | updated = OrderedDict() | 1313 | updated = OrderedDict() |
1356 | added = OrderedDict() | 1314 | added = OrderedDict() |
1357 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') | 1315 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') |
@@ -1373,6 +1331,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
1373 | # values, but they ought to be anyway... | 1331 | # values, but they ought to be anyway... |
1374 | new_basename = seqpatch_re.match(new_patch).group(2) | 1332 | new_basename = seqpatch_re.match(new_patch).group(2) |
1375 | match_name = None | 1333 | match_name = None |
1334 | old_patch = None | ||
1376 | for old_patch in existing_patches: | 1335 | for old_patch in existing_patches: |
1377 | old_basename = seqpatch_re.match(old_patch).group(2) | 1336 | old_basename = seqpatch_re.match(old_patch).group(2) |
1378 | old_basename_splitext = os.path.splitext(old_basename) | 1337 | old_basename_splitext = os.path.splitext(old_basename) |
@@ -1421,6 +1380,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): | |||
1421 | 1380 | ||
1422 | def _create_kconfig_diff(srctree, rd, outfile): | 1381 | def _create_kconfig_diff(srctree, rd, outfile): |
1423 | """Create a kconfig fragment""" | 1382 | """Create a kconfig fragment""" |
1383 | import bb.process | ||
1424 | # Only update config fragment if both config files exist | 1384 | # Only update config fragment if both config files exist |
1425 | orig_config = os.path.join(srctree, '.config.baseline') | 1385 | orig_config = os.path.join(srctree, '.config.baseline') |
1426 | new_config = os.path.join(srctree, '.config.new') | 1386 | new_config = os.path.join(srctree, '.config.new') |
@@ -1452,16 +1412,21 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1452 | 1. updated - files that already exist in SRCURI | 1412 | 1. updated - files that already exist in SRCURI |
1453 | 2. added - new files that don't exist in SRCURI | 1413 | 2. added - new files that don't exist in SRCURI |
1454 | 3. removed - files that exist in SRCURI but not in exported files | 1414 | 3. removed - files that exist in SRCURI but not in exported files |
1455 | In each dict the key is the 'basepath' of the URI and value is the | 1415 | In each dict the key is the 'basepath' of the URI and value is: |
1456 | absolute path to the existing file in recipe space (if any). | 1416 | - for updated and added dicts, a dict with 1 optional key: |
1417 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1418 | - for removed dict, the absolute path to the existing file in recipe space | ||
1457 | """ | 1419 | """ |
1458 | import oe.recipeutils | 1420 | import oe.recipeutils |
1421 | import bb.data | ||
1422 | import bb.process | ||
1459 | 1423 | ||
1460 | # Find out local files (SRC_URI files that exist in the "recipe space"). | 1424 | # Find out local files (SRC_URI files that exist in the "recipe space"). |
1461 | # Local files that reside in srctree are not included in patch generation. | 1425 | # Local files that reside in srctree are not included in patch generation. |
1462 | # Instead they are directly copied over the original source files (in | 1426 | # Instead they are directly copied over the original source files (in |
1463 | # recipe space). | 1427 | # recipe space). |
1464 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1428 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1429 | |||
1465 | new_set = None | 1430 | new_set = None |
1466 | updated = OrderedDict() | 1431 | updated = OrderedDict() |
1467 | added = OrderedDict() | 1432 | added = OrderedDict() |
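The updated docstring above changes the value shape returned by _export_local_files(): entries in updated and added now map to a small dict (with an optional 'path' key), while removed keeps the plain recipe-space path, which is why the callers further down switch from "for basepath, path in upd_f.items()" to "for basepath, param in upd_f.items()". An illustrative return value, with made-up file names:

# Illustrative shape of the (updated, added, removed) tuple described in the
# docstring above; file names and paths are made up.
updated = {'defconfig': {'path': '/path/to/recipe/files/defconfig'}}
added   = {'devtool-fragment.cfg': {}}          # no 'path' yet, file is new
removed = {'extra.cfg': '/path/to/recipe/files/extra.cfg'}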
@@ -1478,24 +1443,28 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1478 | if branchname.startswith(override_branch_prefix): | 1443 | if branchname.startswith(override_branch_prefix): |
1479 | return (updated, added, removed) | 1444 | return (updated, added, removed) |
1480 | 1445 | ||
1481 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1446 | files = _git_modified(srctree) |
1482 | git_files = _git_ls_tree(srctree) | 1447 | #if not files: |
1483 | if 'oe-local-files' in git_files: | 1448 | # files = _ls_tree(srctree) |
1484 | # If tracked by Git, take the files from srctree HEAD. First get | 1449 | for f in files: |
1485 | # the tree object of the directory | 1450 | fullfile = os.path.join(srctree, f) |
1486 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1451 | if os.path.exists(os.path.join(fullfile, ".git")): |
1487 | tree = git_files['oe-local-files'][2] | 1452 | # submodules handled elsewhere |
1488 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1453 | continue |
1489 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1454 | if f not in existing_files: |
1490 | GIT_INDEX_FILE=tmp_index)) | 1455 | added[f] = {} |
1491 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1456 | if os.path.isdir(os.path.join(srctree, f)): |
1492 | elif os.path.isdir(local_files_dir): | 1457 | shutil.copytree(fullfile, os.path.join(destdir, f)) |
1493 | # If not tracked by Git, just copy from working copy | 1458 | else: |
1494 | new_set = _ls_tree(local_files_dir) | 1459 | shutil.copy2(fullfile, os.path.join(destdir, f)) |
1495 | bb.process.run(['cp', '-ax', | 1460 | elif not os.path.exists(fullfile): |
1496 | os.path.join(local_files_dir, '.'), destdir]) | 1461 | removed[f] = existing_files[f] |
1497 | else: | 1462 | elif f in existing_files: |
1498 | new_set = [] | 1463 | updated[f] = {'path' : existing_files[f]} |
1464 | if os.path.isdir(os.path.join(srctree, f)): | ||
1465 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1466 | else: | ||
1467 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1499 | 1468 | ||
1500 | # Special handling for kernel config | 1469 | # Special handling for kernel config |
1501 | if bb.data.inherits_class('kernel-yocto', rd): | 1470 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -1503,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1503 | fragment_path = os.path.join(destdir, fragment_fn) | 1472 | fragment_path = os.path.join(destdir, fragment_fn) |
1504 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1473 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1505 | if os.path.exists(fragment_path): | 1474 | if os.path.exists(fragment_path): |
1506 | if fragment_fn not in new_set: | 1475 | if fragment_fn in removed: |
1507 | new_set.append(fragment_fn) | 1476 | del removed[fragment_fn] |
1508 | # Copy fragment to local-files | 1477 | if fragment_fn not in updated and fragment_fn not in added: |
1509 | if os.path.isdir(local_files_dir): | 1478 | added[fragment_fn] = {} |
1510 | shutil.copy2(fragment_path, local_files_dir) | ||
1511 | else: | 1479 | else: |
1512 | if fragment_fn in new_set: | 1480 | if fragment_fn in updated: |
1513 | new_set.remove(fragment_fn) | 1481 | removed[fragment_fn] = updated[fragment_fn] |
1514 | # Remove fragment from local-files | 1482 | del updated[fragment_fn] |
1515 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1516 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1517 | 1483 | ||
1518 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1484 | # Special handling for cml1, ccmake, etc bbclasses that generated |
1519 | # configuration fragment files that are consumed as source files | 1485 | # configuration fragment files that are consumed as source files |
@@ -1521,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1521 | if bb.data.inherits_class(frag_class, rd): | 1487 | if bb.data.inherits_class(frag_class, rd): |
1522 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1488 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1523 | if os.path.exists(srcpath): | 1489 | if os.path.exists(srcpath): |
1524 | if frag_name not in new_set: | 1490 | if frag_name in removed: |
1525 | new_set.append(frag_name) | 1491 | del removed[frag_name] |
1492 | if frag_name not in updated: | ||
1493 | added[frag_name] = {} | ||
1526 | # copy fragment into destdir | 1494 | # copy fragment into destdir |
1527 | shutil.copy2(srcpath, destdir) | 1495 | shutil.copy2(srcpath, destdir) |
1528 | # copy fragment into local files if exists | 1496 | |
1529 | if os.path.isdir(local_files_dir): | ||
1530 | shutil.copy2(srcpath, local_files_dir) | ||
1531 | |||
1532 | if new_set is not None: | ||
1533 | for fname in new_set: | ||
1534 | if fname in existing_files: | ||
1535 | origpath = existing_files.pop(fname) | ||
1536 | workpath = os.path.join(local_files_dir, fname) | ||
1537 | if not filecmp.cmp(origpath, workpath): | ||
1538 | updated[fname] = origpath | ||
1539 | elif fname != '.gitignore': | ||
1540 | added[fname] = None | ||
1541 | |||
1542 | workdir = rd.getVar('WORKDIR') | ||
1543 | s = rd.getVar('S') | ||
1544 | if not s.endswith(os.sep): | ||
1545 | s += os.sep | ||
1546 | |||
1547 | if workdir != s: | ||
1548 | # Handle files where subdir= was specified | ||
1549 | for fname in list(existing_files.keys()): | ||
1550 | # FIXME handle both subdir starting with BP and not? | ||
1551 | fworkpath = os.path.join(workdir, fname) | ||
1552 | if fworkpath.startswith(s): | ||
1553 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1554 | if os.path.exists(fpath): | ||
1555 | origpath = existing_files.pop(fname) | ||
1556 | if not filecmp.cmp(origpath, fpath): | ||
1557 | updated[fpath] = origpath | ||
1558 | |||
1559 | removed = existing_files | ||
1560 | return (updated, added, removed) | 1497 | return (updated, added, removed) |
1561 | 1498 | ||
1562 | 1499 | ||
@@ -1574,7 +1511,7 @@ def _determine_files_dir(rd): | |||
1574 | 1511 | ||
1575 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): | 1512 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): |
1576 | """Implement the 'srcrev' mode of update-recipe""" | 1513 | """Implement the 'srcrev' mode of update-recipe""" |
1577 | import bb | 1514 | import bb.process |
1578 | import oe.recipeutils | 1515 | import oe.recipeutils |
1579 | 1516 | ||
1580 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 1517 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
@@ -1612,6 +1549,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1612 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1549 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1613 | srctreebase = workspace[recipename]['srctreebase'] | 1550 | srctreebase = workspace[recipename]['srctreebase'] |
1614 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | 1551 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1552 | removedentries = {} | ||
1615 | if not no_remove: | 1553 | if not no_remove: |
1616 | # Find list of existing patches in recipe file | 1554 | # Find list of existing patches in recipe file |
1617 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1555 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
@@ -1640,7 +1578,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1640 | redirect_output=dry_run_outdir) | 1578 | redirect_output=dry_run_outdir) |
1641 | else: | 1579 | else: |
1642 | files_dir = _determine_files_dir(rd) | 1580 | files_dir = _determine_files_dir(rd) |
1643 | for basepath, path in upd_f.items(): | 1581 | for basepath, param in upd_f.items(): |
1582 | path = param['path'] | ||
1644 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) | 1583 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) |
1645 | if os.path.isabs(basepath): | 1584 | if os.path.isabs(basepath): |
1646 | # Original file (probably with subdir pointing inside source tree) | 1585 | # Original file (probably with subdir pointing inside source tree) |
@@ -1650,7 +1589,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1650 | _move_file(os.path.join(local_files_dir, basepath), path, | 1589 | _move_file(os.path.join(local_files_dir, basepath), path, |
1651 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1590 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1652 | update_srcuri= True | 1591 | update_srcuri= True |
1653 | for basepath, path in new_f.items(): | 1592 | for basepath, param in new_f.items(): |
1593 | path = param['path'] | ||
1654 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1594 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1655 | _move_file(os.path.join(local_files_dir, basepath), | 1595 | _move_file(os.path.join(local_files_dir, basepath), |
1656 | os.path.join(files_dir, basepath), | 1596 | os.path.join(files_dir, basepath), |
@@ -1673,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1673 | 1613 | ||
1674 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): | 1614 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): |
1675 | """Implement the 'patch' mode of update-recipe""" | 1615 | """Implement the 'patch' mode of update-recipe""" |
1676 | import bb | ||
1677 | import oe.recipeutils | 1616 | import oe.recipeutils |
1678 | 1617 | ||
1679 | recipefile = rd.getVar('FILE') | 1618 | recipefile = rd.getVar('FILE') |
@@ -1772,7 +1711,8 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1772 | else: | 1711 | else: |
1773 | # Update existing files | 1712 | # Update existing files |
1774 | files_dir = _determine_files_dir(rd) | 1713 | files_dir = _determine_files_dir(rd) |
1775 | for basepath, path in upd_f.items(): | 1714 | for basepath, param in upd_f.items(): |
1715 | path = param['path'] | ||
1776 | logger.info('Updating file %s' % basepath) | 1716 | logger.info('Updating file %s' % basepath) |
1777 | if os.path.isabs(basepath): | 1717 | if os.path.isabs(basepath): |
1778 | # Original file (probably with subdir pointing inside source tree) | 1718 | # Original file (probably with subdir pointing inside source tree) |
@@ -1786,6 +1726,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1786 | for basepath, param in upd_p.items(): | 1726 | for basepath, param in upd_p.items(): |
1787 | path = param['path'] | 1727 | path = param['path'] |
1788 | patchdir = param.get('patchdir', ".") | 1728 | patchdir = param.get('patchdir', ".") |
1729 | patchdir_param = {} | ||
1789 | if patchdir != "." : | 1730 | if patchdir != "." : |
1790 | patchdir_param = dict(patchdir_params) | 1731 | patchdir_param = dict(patchdir_params) |
1791 | if patchdir_param: | 1732 | if patchdir_param: |
@@ -1806,7 +1747,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1806 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1747 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1807 | updatefiles = True | 1748 | updatefiles = True |
1808 | # Add any new files | 1749 | # Add any new files |
1809 | for basepath, path in new_f.items(): | 1750 | for basepath, param in new_f.items(): |
1810 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1751 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1811 | _move_file(os.path.join(local_files_dir, basepath), | 1752 | _move_file(os.path.join(local_files_dir, basepath), |
1812 | os.path.join(files_dir, basepath), | 1753 | os.path.join(files_dir, basepath), |
@@ -1851,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1851 | 1792 | ||
1852 | def _guess_recipe_update_mode(srctree, rdata): | 1793 | def _guess_recipe_update_mode(srctree, rdata): |
1853 | """Guess the recipe update mode to use""" | 1794 | """Guess the recipe update mode to use""" |
1795 | import bb.process | ||
1854 | src_uri = (rdata.getVar('SRC_URI') or '').split() | 1796 | src_uri = (rdata.getVar('SRC_URI') or '').split() |
1855 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] | 1797 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] |
1856 | if not git_uris: | 1798 | if not git_uris: |
@@ -1872,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata): | |||
1872 | return 'patch' | 1814 | return 'patch' |
1873 | 1815 | ||
1874 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): | 1816 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): |
1817 | import bb.data | ||
1818 | import bb.process | ||
1875 | srctree = workspace[recipename]['srctree'] | 1819 | srctree = workspace[recipename]['srctree'] |
1876 | if mode == 'auto': | 1820 | if mode == 'auto': |
1877 | mode = _guess_recipe_update_mode(srctree, rd) | 1821 | mode = _guess_recipe_update_mode(srctree, rd) |
@@ -1994,6 +1938,7 @@ def status(args, config, basepath, workspace): | |||
1994 | 1938 | ||
1995 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | 1939 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): |
1996 | """Reset one or more recipes""" | 1940 | """Reset one or more recipes""" |
1941 | import bb.process | ||
1997 | import oe.path | 1942 | import oe.path |
1998 | 1943 | ||
1999 | def clean_preferred_provider(pn, layerconf_path): | 1944 | def clean_preferred_provider(pn, layerconf_path): |
@@ -2006,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
2006 | lines = f.readlines() | 1951 | lines = f.readlines() |
2007 | with open(new_layerconf_file, 'a') as nf: | 1952 | with open(new_layerconf_file, 'a') as nf: |
2008 | for line in lines: | 1953 | for line in lines: |
2009 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' | 1954 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$' |
2010 | if not re.match(pprovider_exp, line): | 1955 | if not re.match(pprovider_exp, line): |
2011 | nf.write(line) | 1956 | nf.write(line) |
2012 | else: | 1957 | else: |
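The re.escape() added above matters because recipe names may contain regexp metacharacters; without it, a name such as gtk+3 would never match its own PREFERRED_PROVIDER line and the entry would not be dropped from the workspace layer.conf. A small self-contained check (the configuration line is made up for illustration):

# Why re.escape() matters in clean_preferred_provider(): '+' is a regexp
# metacharacter, so an unescaped recipe name fails to match its own line.
import re

pn = 'gtk+3'
line = 'PREFERRED_PROVIDER_virtual/gtk = "gtk+3"\n'
unescaped = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'
escaped   = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
print(bool(re.match(unescaped, line.rstrip())))  # False: '+' means "one or more"
print(bool(re.match(escaped, line.rstrip())))    # True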
@@ -2097,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
2097 | 2042 | ||
2098 | def reset(args, config, basepath, workspace): | 2043 | def reset(args, config, basepath, workspace): |
2099 | """Entry point for the devtool 'reset' subcommand""" | 2044 | """Entry point for the devtool 'reset' subcommand""" |
2100 | import bb | ||
2101 | import shutil | ||
2102 | 2045 | ||
2103 | recipes = "" | 2046 | recipes = "" |
2104 | 2047 | ||
@@ -2377,6 +2320,7 @@ def register_commands(subparsers, context): | |||
2377 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') | 2320 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') |
2378 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') | 2321 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') |
2379 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") | 2322 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") |
2323 | parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe') | ||
2380 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) | 2324 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) |
2381 | 2325 | ||
2382 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', | 2326 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', |
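The new --debug-build option for 'devtool modify' is documented by its help string as adding DEBUG_BUILD = "1" to the modified recipe; the implementation itself is not part of this hunk. A rough sketch of what such an option could do, with an illustrative helper and append path (not the actual devtool code):

def enable_debug_build(appendfile):
    # Hypothetical helper: append a single assignment to the workspace
    # bbappend so the modified recipe is built with debug options.
    with open(appendfile, 'a') as f:
        f.write('\nDEBUG_BUILD = "1"\n')

# e.g. enable_debug_build('workspace/appends/busybox_1.36.1.bbappend')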
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index fa5b8ef3c7..d9aca6e2db 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -32,7 +32,7 @@ def _run(cmd, cwd=''): | |||
32 | 32 | ||
33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
34 | srctree = tmpdir | 34 | srctree = tmpdir |
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | 38 | else: |
@@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
76 | bb.utils.rename(os.path.join(path, oldfile), | 76 | bb.utils.rename(os.path.join(path, oldfile), |
77 | os.path.join(path, newfile)) | 77 | os.path.join(path, newfile)) |
78 | 78 | ||
79 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path): |
80 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
81 | if oldrecipe.endswith('_%s.bb' % oldpv): | 81 | if oldrecipe.endswith('_%s.bb' % oldpv): |
82 | newrecipe = '%s_%s.bb' % (bpn, newpv) | 82 | newrecipe = '%s_%s.bb' % (pn, newpv) |
83 | if oldrecipe != newrecipe: | 83 | if oldrecipe != newrecipe: |
84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) | 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) |
85 | else: | 85 | else: |
86 | newrecipe = oldrecipe | 86 | newrecipe = oldrecipe |
87 | return os.path.join(path, newrecipe) | 87 | return os.path.join(path, newrecipe) |
88 | 88 | ||
89 | def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | 89 | def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path): |
90 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
91 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path) |
92 | 92 | ||
93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
94 | """Writes an append file""" | 94 | """Writes an append file""" |
@@ -169,6 +169,7 @@ def _get_uri(rd): | |||
169 | 169 | ||
170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): | 170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): |
171 | """Extract sources of a recipe with a new version""" | 171 | """Extract sources of a recipe with a new version""" |
172 | import oe.patch | ||
172 | 173 | ||
173 | def __run(cmd): | 174 | def __run(cmd): |
174 | """Simple wrapper which calls _run with srctree as cwd""" | 175 | """Simple wrapper which calls _run with srctree as cwd""" |
@@ -187,9 +188,9 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
187 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 188 | if uri.startswith('git://') or uri.startswith('gitsm://'): |
188 | __run('git fetch') | 189 | __run('git fetch') |
189 | __run('git checkout %s' % rev) | 190 | __run('git checkout %s' % rev) |
190 | __run('git tag -f devtool-base-new') | 191 | __run('git tag -f --no-sign devtool-base-new') |
191 | __run('git submodule update --recursive') | 192 | __run('git submodule update --recursive') |
192 | __run('git submodule foreach \'git tag -f devtool-base-new\'') | 193 | __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'') |
193 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') | 194 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') |
194 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] | 195 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] |
195 | checksums = {} | 196 | checksums = {} |
@@ -256,7 +257,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
256 | useroptions = [] | 257 | useroptions = [] |
257 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | 258 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) |
258 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) | 259 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) |
259 | __run('git tag -f devtool-base-%s' % newpv) | 260 | __run('git tag -f --no-sign devtool-base-%s' % newpv) |
260 | 261 | ||
261 | revs = {} | 262 | revs = {} |
262 | for path in paths: | 263 | for path in paths: |
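Both tag hunks above add --no-sign so that a user's tag.gpgSign = true git configuration cannot make the throwaway devtool-base tags fail or prompt for a key. Roughly, each call now behaves like this sketch (wrapper name, path and tag are illustrative):

import subprocess

def tag_unsigned(srctree, tagname):
    # --no-sign overrides tag.gpgSign from the user's git config, so the
    # temporary devtool tags are never GPG-signed.
    subprocess.run(['git', 'tag', '-f', '--no-sign', tagname],
                   cwd=srctree, check=True)

# e.g. tag_unsigned('/path/to/srctree', 'devtool-base-new')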
@@ -335,19 +336,19 @@ def _add_license_diff_to_recipe(path, diff): | |||
335 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 336 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
336 | """Creates the new recipe under workspace""" | 337 | """Creates the new recipe under workspace""" |
337 | 338 | ||
338 | bpn = rd.getVar('BPN') | 339 | pn = rd.getVar('PN') |
339 | path = os.path.join(workspace, 'recipes', bpn) | 340 | path = os.path.join(workspace, 'recipes', pn) |
340 | bb.utils.mkdirhier(path) | 341 | bb.utils.mkdirhier(path) |
341 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) | 342 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) |
342 | if not copied: | 343 | if not copied: |
343 | raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) | 344 | raise DevtoolError('Internal error - no files were copied for recipe %s' % pn) |
344 | logger.debug('Copied %s to %s' % (copied, path)) | 345 | logger.debug('Copied %s to %s' % (copied, path)) |
345 | 346 | ||
346 | oldpv = rd.getVar('PV') | 347 | oldpv = rd.getVar('PV') |
347 | if not newpv: | 348 | if not newpv: |
348 | newpv = oldpv | 349 | newpv = oldpv |
349 | origpath = rd.getVar('FILE') | 350 | origpath = rd.getVar('FILE') |
350 | fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) | 351 | fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path) |
351 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) | 352 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) |
352 | 353 | ||
353 | newvalues = {} | 354 | newvalues = {} |
@@ -534,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses): | |||
534 | diff = diff + line | 535 | diff = diff + line |
535 | return diff | 536 | return diff |
536 | 537 | ||
538 | def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil): | ||
539 | tasks = [] | ||
540 | for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split(): | ||
541 | logger.info('Running extra recipe upgrade task: %s' % task) | ||
542 | res = tinfoil.build_targets(pn, task, handle_events=True) | ||
543 | |||
544 | if not res: | ||
545 | raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn)) | ||
546 | |||
537 | def upgrade(args, config, basepath, workspace): | 547 | def upgrade(args, config, basepath, workspace): |
538 | """Entry point for the devtool 'upgrade' subcommand""" | 548 | """Entry point for the devtool 'upgrade' subcommand""" |
539 | 549 | ||
@@ -561,7 +571,7 @@ def upgrade(args, config, basepath, workspace): | |||
561 | else: | 571 | else: |
562 | srctree = standard.get_default_srctree(config, pn) | 572 | srctree = standard.get_default_srctree(config, pn) |
563 | 573 | ||
564 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
565 | 575 | ||
566 | # try to automatically discover latest version and revision if not provided on command line | 576 | # try to automatically discover latest version and revision if not provided on command line |
567 | if not args.version and not args.srcrev: | 577 | if not args.version and not args.srcrev: |
@@ -601,7 +611,7 @@ def upgrade(args, config, basepath, workspace): | |||
601 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 611 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
602 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 612 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
603 | except (bb.process.CmdError, DevtoolError) as e: | 613 | except (bb.process.CmdError, DevtoolError) as e: |
604 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) | 614 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN')) |
605 | _upgrade_error(e, recipedir, srctree, args.keep_failure) | 615 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
606 | standard._add_md5(config, pn, os.path.dirname(rf)) | 616 | standard._add_md5(config, pn, os.path.dirname(rf)) |
607 | 617 | ||
@@ -609,6 +619,8 @@ def upgrade(args, config, basepath, workspace): | |||
609 | copied, config.workspace_path, rd) | 619 | copied, config.workspace_path, rd) |
610 | standard._add_md5(config, pn, af) | 620 | standard._add_md5(config, pn, af) |
611 | 621 | ||
622 | _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil) | ||
623 | |||
612 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 624 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
613 | 625 | ||
614 | logger.info('Upgraded source extracted to %s' % srctree) | 626 | logger.info('Upgraded source extracted to %s' % srctree) |
@@ -643,18 +655,28 @@ def latest_version(args, config, basepath, workspace): | |||
643 | return 0 | 655 | return 0 |
644 | 656 | ||
645 | def check_upgrade_status(args, config, basepath, workspace): | 657 | def check_upgrade_status(args, config, basepath, workspace): |
658 | def _print_status(recipe): | ||
659 | print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'], | ||
660 | recipe['cur_ver'], | ||
661 | recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"), | ||
662 | recipe['maintainer'], | ||
663 | recipe['revision'] if recipe['revision'] != 'N/A' else "", | ||
664 | "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else "")) | ||
646 | if not args.recipe: | 665 | if not args.recipe: |
647 | logger.info("Checking the upstream status for all recipes may take a few minutes") | 666 | logger.info("Checking the upstream status for all recipes may take a few minutes") |
648 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) | 667 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) |
649 | for result in results: | 668 | for recipegroup in results: |
650 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 669 | upgrades = [r for r in recipegroup if r['status'] != 'MATCH'] |
651 | if args.all or result[1] != 'MATCH': | 670 | currents = [r for r in recipegroup if r['status'] == 'MATCH'] |
652 | print("{:25} {:15} {:15} {} {} {}".format( result[0], | 671 | if len(upgrades) > 1: |
653 | result[2], | 672 | print("These recipes need to be upgraded together {") |
654 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 673 | for r in sorted(upgrades, key=lambda r:r['pn']): |
655 | result[4], | 674 | _print_status(r) |
656 | result[5] if result[5] != 'N/A' else "", | 675 | if len(upgrades) > 1: |
657 | "cannot be updated due to: %s" %(result[6]) if result[6] else "")) | 676 | print("}") |
677 | for r in currents: | ||
678 | if args.all: | ||
679 | _print_status(r) | ||
658 | 680 | ||
659 | def register_commands(subparsers, context): | 681 | def register_commands(subparsers, context): |
660 | """Register devtool subcommands from this plugin""" | 682 | """Register devtool subcommands from this plugin""" |
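The reworked check_upgrade_status() consumes a grouped result from oe.recipeutils.get_recipe_upgrade_status(): a list of recipe groups, each group a list of dicts, so recipes that have to move in lockstep are printed inside one brace-delimited block and up-to-date ('MATCH') entries only appear with --all. A sketch of the expected input shape, with invented values:

# Hypothetical result structure; the keys follow _print_status() above.
results = [
    [  # one group: these recipes would be upgraded together
        {'pn': 'glib-2.0', 'cur_ver': '2.78.0', 'next_ver': '2.80.0',
         'status': 'UPDATE', 'maintainer': 'Somebody <sb@example.com>',
         'revision': 'N/A', 'no_upgrade_reason': ''},
        {'pn': 'glib-2.0-initial', 'cur_ver': '2.78.0', 'next_ver': '2.80.0',
         'status': 'UPDATE', 'maintainer': 'Somebody <sb@example.com>',
         'revision': 'N/A', 'no_upgrade_reason': ''},
    ],
    [  # an up-to-date recipe: only printed when --all is given
        {'pn': 'zlib', 'cur_ver': '1.3.1', 'next_ver': '1.3.1',
         'status': 'MATCH', 'maintainer': 'Somebody <sb@example.com>',
         'revision': 'N/A', 'no_upgrade_reason': ''},
    ],
]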
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py index 964817766b..bf39f71b11 100644 --- a/scripts/lib/devtool/utilcmds.py +++ b/scripts/lib/devtool/utilcmds.py | |||
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace): | |||
64 | b = rd.getVar('B') | 64 | b = rd.getVar('B') |
65 | s = rd.getVar('S') | 65 | s = rd.getVar('S') |
66 | configurescript = os.path.join(s, 'configure') | 66 | configurescript = os.path.join(s, 'configure') |
67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) | 67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd)) |
68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') | 68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') |
69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') | 69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') |
70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') | 70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') |
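The __BBTASKS lookup is replaced by the bb.build.listtasks() helper, which hides the internal variable. A small sketch of the check in isolation (rd is a recipe datastore, e.g. obtained through tinfoil):

import bb.build

def configure_is_disabled(rd):
    # do_configure is considered disabled when it is marked noexec or is
    # not among the tasks defined for the recipe.
    return ('noexec' in rd.getVarFlags('do_configure')
            or 'do_configure' not in bb.build.listtasks(rd))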
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py index 341e893305..041d79f162 100644 --- a/scripts/lib/recipetool/append.py +++ b/scripts/lib/recipetool/append.py | |||
@@ -101,7 +101,7 @@ def determine_file_source(targetpath, rd): | |||
101 | import oe.recipeutils | 101 | import oe.recipeutils |
102 | 102 | ||
103 | # See if it's in do_install for the recipe | 103 | # See if it's in do_install for the recipe |
104 | workdir = rd.getVar('WORKDIR') | 104 | unpackdir = rd.getVar('UNPACKDIR') |
105 | src_uri = rd.getVar('SRC_URI') | 105 | src_uri = rd.getVar('SRC_URI') |
106 | srcfile = '' | 106 | srcfile = '' |
107 | modpatches = [] | 107 | modpatches = [] |
@@ -113,9 +113,9 @@ def determine_file_source(targetpath, rd): | |||
113 | if not srcpath.startswith('/'): | 113 | if not srcpath.startswith('/'): |
114 | # Handle non-absolute path | 114 | # Handle non-absolute path |
115 | srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) | 115 | srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) |
116 | if srcpath.startswith(workdir): | 116 | if srcpath.startswith(unpackdir): |
117 | # OK, now we have the source file name, look for it in SRC_URI | 117 | # OK, now we have the source file name, look for it in SRC_URI |
118 | workdirfile = os.path.relpath(srcpath, workdir) | 118 | workdirfile = os.path.relpath(srcpath, unpackdir) |
119 | # FIXME this is where we ought to have some code in the fetcher, because this is naive | 119 | # FIXME this is where we ought to have some code in the fetcher, because this is naive |
120 | for item in src_uri.split(): | 120 | for item in src_uri.split(): |
121 | localpath = bb.fetch2.localpath(item, rd) | 121 | localpath = bb.fetch2.localpath(item, rd) |
@@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None): | |||
317 | import oe.recipeutils | 317 | import oe.recipeutils |
318 | 318 | ||
319 | srcdir = rd.getVar('S') | 319 | srcdir = rd.getVar('S') |
320 | workdir = rd.getVar('WORKDIR') | 320 | unpackdir = rd.getVar('UNPACKDIR') |
321 | 321 | ||
322 | import bb.fetch | 322 | import bb.fetch |
323 | simplified = {} | 323 | simplified = {} |
@@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None): | |||
336 | src_destdir = os.path.dirname(srcfile) | 336 | src_destdir = os.path.dirname(srcfile) |
337 | if not args.use_workdir: | 337 | if not args.use_workdir: |
338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): | 338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): |
339 | srcdir = os.path.join(workdir, 'git') | 339 | srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX')) |
340 | if not bb.data.inherits_class('kernel-yocto', rd): | 340 | if not bb.data.inherits_class('kernel-yocto', rd): |
341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') | 341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}') |
342 | src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) | 342 | src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir) |
343 | src_destdir = os.path.normpath(src_destdir) | 343 | src_destdir = os.path.normpath(src_destdir) |
344 | 344 | ||
345 | if src_destdir and src_destdir != '.': | 345 | if src_destdir and src_destdir != '.': |
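Throughout append.py the reference directory for unpacked sources switches from WORKDIR to UNPACKDIR, and the kernel fallback now uses ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX} instead of the hard-coded ${WORKDIR}/git. A small, self-contained illustration of the relative-path computation (all paths are made up):

import os

unpackdir = '/build/tmp/work/machine/foo/1.0/sources-unpack'  # illustrative UNPACKDIR
srcdir = os.path.join(unpackdir, 'git')  # assuming BB_GIT_DEFAULT_DESTSUFFIX expands to "git"
srcfile = 'src/main.c'                   # file path relative to the source tree

src_destdir = os.path.normpath(
    os.path.join(os.path.relpath(srcdir, unpackdir), os.path.dirname(srcfile)))
print(src_destdir)  # -> git/src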
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 8e9ff38db6..ef0ba974a9 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit | |||
18 | import hashlib | 18 | import hashlib |
19 | import bb.fetch2 | 19 | import bb.fetch2 |
20 | logger = logging.getLogger('recipetool') | 20 | logger = logging.getLogger('recipetool') |
21 | from oe.license import tidy_licenses | ||
22 | from oe.license_finder import find_licenses | ||
21 | 23 | ||
22 | tinfoil = None | 24 | tinfoil = None |
23 | plugins = None | 25 | plugins = None |
@@ -528,7 +530,7 @@ def create_recipe(args): | |||
528 | if ftmpdir and args.keep_temp: | 530 | if ftmpdir and args.keep_temp: |
529 | logger.info('Fetch temp directory is %s' % ftmpdir) | 531 | logger.info('Fetch temp directory is %s' % ftmpdir) |
530 | 532 | ||
531 | dirlist = scriptutils.filter_src_subdirs(srctree) | 533 | dirlist = os.listdir(srctree) |
532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | 534 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) |
533 | if len(dirlist) == 1: | 535 | if len(dirlist) == 1: |
534 | singleitem = os.path.join(srctree, dirlist[0]) | 536 | singleitem = os.path.join(srctree, dirlist[0]) |
@@ -637,7 +639,6 @@ def create_recipe(args): | |||
637 | if len(splitline) > 1: | 639 | if len(splitline) > 1: |
638 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | 640 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): |
639 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' | 641 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' |
640 | srcsubdir = 'git' | ||
641 | break | 642 | break |
642 | 643 | ||
643 | if args.src_subdir: | 644 | if args.src_subdir: |
@@ -735,7 +736,7 @@ def create_recipe(args): | |||
735 | if srcsubdir and not args.binary: | 736 | if srcsubdir and not args.binary: |
736 | # (for binary packages we explicitly specify subdir= when fetching to | 737 | # (for binary packages we explicitly specify subdir= when fetching to |
737 | # match the default value of S, so we don't need to set it in that case) | 738 | # match the default value of S, so we don't need to set it in that case) |
738 | lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) | 739 | lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir) |
739 | lines_before.append('') | 740 | lines_before.append('') |
740 | 741 | ||
741 | if pkgarch: | 742 | if pkgarch: |
@@ -764,6 +765,7 @@ def create_recipe(args): | |||
764 | extrafiles = extravalues.pop('extrafiles', {}) | 765 | extrafiles = extravalues.pop('extrafiles', {}) |
765 | extra_pn = extravalues.pop('PN', None) | 766 | extra_pn = extravalues.pop('PN', None) |
766 | extra_pv = extravalues.pop('PV', None) | 767 | extra_pv = extravalues.pop('PV', None) |
768 | run_tasks = extravalues.pop('run_tasks', "").split() | ||
767 | 769 | ||
768 | if extra_pv and not realpv: | 770 | if extra_pv and not realpv: |
769 | realpv = extra_pv | 771 | realpv = extra_pv |
@@ -824,7 +826,8 @@ def create_recipe(args): | |||
824 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | 826 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) |
825 | bb.utils.mkdirhier(extraoutdir) | 827 | bb.utils.mkdirhier(extraoutdir) |
826 | for destfn, extrafile in extrafiles.items(): | 828 | for destfn, extrafile in extrafiles.items(): |
827 | shutil.move(extrafile, os.path.join(extraoutdir, destfn)) | 829 | fn = destfn.format(pn=pn, pv=realpv) |
830 | shutil.move(extrafile, os.path.join(extraoutdir, fn)) | ||
828 | 831 | ||
829 | lines = lines_before | 832 | lines = lines_before |
830 | lines_before = [] | 833 | lines_before = [] |
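Destination names in the 'extrafiles' dictionary can now carry {pn} and {pv} placeholders, expanded when the extra file is moved next to the generated recipe. For example (the file name is a hypothetical plugin-provided value):

destfn = '{pn}-crates.inc'                        # hypothetical name from a plugin
print(destfn.format(pn='ripgrep', pv='14.1.0'))   # -> ripgrep-crates.inc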
@@ -839,7 +842,7 @@ def create_recipe(args): | |||
839 | line = line.replace(realpv, '${PV}') | 842 | line = line.replace(realpv, '${PV}') |
840 | if pn: | 843 | if pn: |
841 | line = line.replace(pn, '${BPN}') | 844 | line = line.replace(pn, '${BPN}') |
842 | if line == 'S = "${WORKDIR}/${BPN}-${PV}"': | 845 | if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line: |
843 | skipblank = True | 846 | skipblank = True |
844 | continue | 847 | continue |
845 | elif line.startswith('SRC_URI = '): | 848 | elif line.startswith('SRC_URI = '): |
@@ -917,6 +920,10 @@ def create_recipe(args): | |||
917 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | 920 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) |
918 | tinfoil.modified_files() | 921 | tinfoil.modified_files() |
919 | 922 | ||
923 | for task in run_tasks: | ||
924 | logger.info("Running task %s" % task) | ||
925 | tinfoil.build_file_sync(outfile, task) | ||
926 | |||
920 | if tempsrc: | 927 | if tempsrc: |
921 | if args.keep_temp: | 928 | if args.keep_temp: |
922 | logger.info('Preserving temporary directory %s' % tempsrc) | 929 | logger.info('Preserving temporary directory %s' % tempsrc) |
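The new 'run_tasks' extra value lets a plugin request that certain tasks be run on the freshly written recipe; create_recipe() pops the value and calls tinfoil.build_file_sync() once per task. A sketch of the plugin side, with a hypothetical task name:

# Inside a hypothetical recipe handler:
extravalues = {}
extravalues['run_tasks'] = 'update_crates'  # whitespace-separated task names
# create_recipe() later runs: tinfoil.build_file_sync(outfile, 'update_crates')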
@@ -944,23 +951,13 @@ def fixup_license(value): | |||
944 | return '(' + value + ')' | 951 | return '(' + value + ')' |
945 | return value | 952 | return value |
946 | 953 | ||
947 | def tidy_licenses(value): | ||
948 | """Flat, split and sort licenses""" | ||
949 | from oe.license import flattened_licenses | ||
950 | def _choose(a, b): | ||
951 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
952 | return ["(%s | %s)" % (str_a, str_b)] | ||
953 | if not isinstance(value, str): | ||
954 | value = " & ".join(value) | ||
955 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
956 | |||
957 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): | 954 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): |
958 | lichandled = [x for x in handled if x[0] == 'license'] | 955 | lichandled = [x for x in handled if x[0] == 'license'] |
959 | if lichandled: | 956 | if lichandled: |
960 | # Someone else has already handled the license vars, just return their value | 957 | # Someone else has already handled the license vars, just return their value |
961 | return lichandled[0][1] | 958 | return lichandled[0][1] |
962 | 959 | ||
963 | licvalues = guess_license(srctree, d) | 960 | licvalues = find_licenses(srctree, d) |
964 | licenses = [] | 961 | licenses = [] |
965 | lic_files_chksum = [] | 962 | lic_files_chksum = [] |
966 | lic_unknown = [] | 963 | lic_unknown = [] |
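The local tidy_licenses() helper is dropped in favour of the copy in oe.license, and license detection now goes through oe.license_finder.find_licenses() instead of the in-tree guess_license(). The behaviour of tidy_licenses() is unchanged; an illustrative call (requires meta/lib on sys.path, as for recipetool itself):

from oe.license import tidy_licenses

print(tidy_licenses('MIT & BSD-3-Clause & MIT'))
# expected -> ['BSD-3-Clause', 'MIT'] (flattened, de-duplicated, case-insensitively sorted)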
@@ -1040,222 +1037,9 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): | |||
1040 | handled.append(('license', licvalues)) | 1037 | handled.append(('license', licvalues)) |
1041 | return licvalues | 1038 | return licvalues |
1042 | 1039 | ||
1043 | def get_license_md5sums(d, static_only=False, linenumbers=False): | ||
1044 | import bb.utils | ||
1045 | import csv | ||
1046 | md5sums = {} | ||
1047 | if not static_only and not linenumbers: | ||
1048 | # Gather md5sums of license files in common license dir | ||
1049 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
1050 | for fn in os.listdir(commonlicdir): | ||
1051 | md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) | ||
1052 | md5sums[md5value] = fn | ||
1053 | |||
1054 | # The following were extracted from common values in various recipes | ||
1055 | # (double checking the license against the license file itself, not just | ||
1056 | # the LICENSE value in the recipe) | ||
1057 | |||
1058 | # Read license md5sums from csv file | ||
1059 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | ||
1060 | for path in (d.getVar('BBPATH').split(':') | ||
1061 | + [os.path.join(scripts_path, '..', '..')]): | ||
1062 | csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv') | ||
1063 | if os.path.isfile(csv_path): | ||
1064 | with open(csv_path, newline='') as csv_file: | ||
1065 | fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5'] | ||
1066 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames) | ||
1067 | for row in reader: | ||
1068 | if linenumbers: | ||
1069 | md5sums[row['md5sum']] = ( | ||
1070 | row['license'], row['beginline'], row['endline'], row['md5']) | ||
1071 | else: | ||
1072 | md5sums[row['md5sum']] = row['license'] | ||
1073 | |||
1074 | return md5sums | ||
1075 | |||
1076 | def crunch_known_licenses(d): | ||
1077 | ''' | ||
1078 | Calculate the MD5 checksums for the crunched versions of all common | ||
1079 | licenses. Also add additional known checksums. | ||
1080 | ''' | ||
1081 | |||
1082 | crunched_md5sums = {} | ||
1083 | |||
1084 | # common licenses | ||
1085 | crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only' | ||
1086 | crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only' | ||
1087 | crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only' | ||
1088 | |||
1089 | # The following two were gleaned from the "forever" npm package | ||
1090 | crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' | ||
1091 | # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt | ||
1092 | crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' | ||
1093 | # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE | ||
1094 | crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only' | ||
1095 | # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt | ||
1096 | crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only' | ||
1097 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 | ||
1098 | crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only' | ||
1099 | # unixODBC-2.3.4 COPYING | ||
1100 | crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only' | ||
1101 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 | ||
1102 | crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only' | ||
1103 | # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 | ||
1104 | crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' | ||
1105 | |||
1106 | # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD | ||
1107 | crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause' | ||
1108 | # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE | ||
1109 | crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause' | ||
1110 | # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE | ||
1111 | crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause' | ||
1112 | # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE | ||
1113 | crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause' | ||
1114 | # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE | ||
1115 | crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause' | ||
1116 | # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE | ||
1117 | crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause' | ||
1118 | # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE | ||
1119 | crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause' | ||
1120 | # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE | ||
1121 | crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause' | ||
1122 | # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE | ||
1123 | crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause' | ||
1124 | # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE | ||
1125 | crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT' | ||
1126 | # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE | ||
1127 | crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT' | ||
1128 | # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE | ||
1129 | crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0' | ||
1130 | # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md | ||
1131 | crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0' | ||
1132 | # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE | ||
1133 | crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0' | ||
1134 | # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt | ||
1135 | crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0' | ||
1136 | # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE | ||
1137 | crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0' | ||
1138 | # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE | ||
1139 | crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense' | ||
1140 | # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md | ||
1141 | crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib' | ||
1142 | |||
1143 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
1144 | for fn in sorted(os.listdir(commonlicdir)): | ||
1145 | md5value, lictext = crunch_license(os.path.join(commonlicdir, fn)) | ||
1146 | if md5value not in crunched_md5sums: | ||
1147 | crunched_md5sums[md5value] = fn | ||
1148 | elif fn != crunched_md5sums[md5value]: | ||
1149 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn)) | ||
1150 | else: | ||
1151 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value])) | ||
1152 | |||
1153 | return crunched_md5sums | ||
1154 | |||
1155 | def crunch_license(licfile): | ||
1156 | ''' | ||
1157 | Remove non-material text from a license file and then calculate its | ||
1158 | md5sum. This works well for licenses that contain a copyright statement, | ||
1159 | but is also a useful way to handle people's insistence upon reformatting | ||
1160 | the license text slightly (with no material difference to the text of the | ||
1161 | license). | ||
1162 | ''' | ||
1163 | |||
1164 | import oe.utils | ||
1165 | |||
1166 | # Note: these are carefully constructed! | ||
1167 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
1168 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
1169 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
1170 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
1171 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
1172 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
1173 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
1174 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
1175 | |||
1176 | lictext = [] | ||
1177 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
1178 | for line in f: | ||
1179 | # Drop opening statements | ||
1180 | if copyright_re.match(line): | ||
1181 | continue | ||
1182 | elif disclaimer_re.match(line): | ||
1183 | continue | ||
1184 | elif email_re.match(line): | ||
1185 | continue | ||
1186 | elif header_re.match(line): | ||
1187 | continue | ||
1188 | elif tag_re.match(line): | ||
1189 | continue | ||
1190 | elif url_re.match(line): | ||
1191 | continue | ||
1192 | elif license_title_re.match(line): | ||
1193 | continue | ||
1194 | elif license_statement_re.match(line): | ||
1195 | continue | ||
1196 | # Strip comment symbols | ||
1197 | line = line.replace('*', '') \ | ||
1198 | .replace('#', '') | ||
1199 | # Unify spelling | ||
1200 | line = line.replace('sub-license', 'sublicense') | ||
1201 | # Squash spaces | ||
1202 | line = oe.utils.squashspaces(line.strip()) | ||
1203 | # Replace smart quotes, double quotes and backticks with single quotes | ||
1204 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
1205 | # Unify brackets | ||
1206 | line = line.replace("{", "[").replace("}", "]") | ||
1207 | if line: | ||
1208 | lictext.append(line) | ||
1209 | |||
1210 | m = hashlib.md5() | ||
1211 | try: | ||
1212 | m.update(' '.join(lictext).encode('utf-8')) | ||
1213 | md5val = m.hexdigest() | ||
1214 | except UnicodeEncodeError: | ||
1215 | md5val = None | ||
1216 | lictext = '' | ||
1217 | return md5val, lictext | ||
1218 | |||
1219 | def guess_license(srctree, d): | ||
1220 | import bb | ||
1221 | md5sums = get_license_md5sums(d) | ||
1222 | |||
1223 | crunched_md5sums = crunch_known_licenses(d) | ||
1224 | |||
1225 | licenses = [] | ||
1226 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
1227 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go") | ||
1228 | licfiles = [] | ||
1229 | for root, dirs, files in os.walk(srctree): | ||
1230 | for fn in files: | ||
1231 | if fn.endswith(skip_extensions): | ||
1232 | continue | ||
1233 | for spec in licspecs: | ||
1234 | if fnmatch.fnmatch(fn, spec): | ||
1235 | fullpath = os.path.join(root, fn) | ||
1236 | if not fullpath in licfiles: | ||
1237 | licfiles.append(fullpath) | ||
1238 | for licfile in sorted(licfiles): | ||
1239 | md5value = bb.utils.md5_file(licfile) | ||
1240 | license = md5sums.get(md5value, None) | ||
1241 | if not license: | ||
1242 | crunched_md5, lictext = crunch_license(licfile) | ||
1243 | license = crunched_md5sums.get(crunched_md5, None) | ||
1244 | if lictext and not license: | ||
1245 | license = 'Unknown' | ||
1246 | logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \ | ||
1247 | "and replace `Unknown` with the license:\n" \ | ||
1248 | "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value)) | ||
1249 | if license: | ||
1250 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
1251 | |||
1252 | # FIXME should we grab at least one source file with a license header and add that too? | ||
1253 | |||
1254 | return licenses | ||
1255 | |||
1256 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): | 1040 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): |
1257 | """ | 1041 | """ |
1258 | Given a list of (license, path, md5sum) as returned by guess_license(), | 1042 | Given a list of (license, path, md5sum) as returned by match_licenses(), |
1259 | a dict of package name to path mappings, write out a set of | 1043 | a dict of package name to path mappings, write out a set of |
1260 | package-specific LICENSE values. | 1044 | package-specific LICENSE values. |
1261 | """ | 1045 | """ |
@@ -1284,6 +1068,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn | |||
1284 | outlicenses[pkgname] = licenses | 1068 | outlicenses[pkgname] = licenses |
1285 | return outlicenses | 1069 | return outlicenses |
1286 | 1070 | ||
1071 | def generate_common_licenses_chksums(common_licenses, d): | ||
1072 | lic_files_chksums = [] | ||
1073 | for license in tidy_licenses(common_licenses): | ||
1074 | licfile = '${COMMON_LICENSE_DIR}/' + license | ||
1075 | md5value = bb.utils.md5_file(d.expand(licfile)) | ||
1076 | lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value)) | ||
1077 | return lic_files_chksums | ||
1078 | |||
1287 | def read_pkgconfig_provides(d): | 1079 | def read_pkgconfig_provides(d): |
1288 | pkgdatadir = d.getVar('PKGDATA_DIR') | 1080 | pkgdatadir = d.getVar('PKGDATA_DIR') |
1289 | pkgmap = {} | 1081 | pkgmap = {} |
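generate_common_licenses_chksums() is the new helper for building LIC_FILES_CHKSUM entries that point at the common-licenses copies rather than files in the source tree. A hedged usage sketch (d is a datastore with COMMON_LICENSE_DIR set, e.g. from tinfoil; the checksums are elided):

# Assuming this module's namespace (recipetool.create) and a datastore 'd':
chksums = generate_common_licenses_chksums(['MIT', 'Apache-2.0'], d)
# expected, in tidy_licenses() order:
#   ['file://${COMMON_LICENSE_DIR}/Apache-2.0;md5=...',
#    'file://${COMMON_LICENSE_DIR}/MIT;md5=...']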
@@ -1418,4 +1210,3 @@ def register_commands(subparsers): | |||
1418 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) | 1210 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) |
1419 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') | 1211 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') |
1420 | parser_create.set_defaults(func=create_recipe) | 1212 | parser_create.set_defaults(func=create_recipe) |
1421 | |||
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py index c560831442..4b1fa39d13 100644 --- a/scripts/lib/recipetool/create_go.py +++ b/scripts/lib/recipetool/create_go.py | |||
@@ -10,13 +10,7 @@ | |||
10 | # | 10 | # |
11 | 11 | ||
12 | 12 | ||
13 | from collections import namedtuple | ||
14 | from enum import Enum | ||
15 | from html.parser import HTMLParser | ||
16 | from recipetool.create import RecipeHandler, handle_license_vars | 13 | from recipetool.create import RecipeHandler, handle_license_vars |
17 | from recipetool.create import guess_license, tidy_licenses, fixup_license | ||
18 | from recipetool.create import determine_from_url | ||
19 | from urllib.error import URLError | ||
20 | 14 | ||
21 | import bb.utils | 15 | import bb.utils |
22 | import json | 16 | import json |
@@ -25,33 +19,20 @@ import os | |||
25 | import re | 19 | import re |
26 | import subprocess | 20 | import subprocess |
27 | import sys | 21 | import sys |
28 | import shutil | ||
29 | import tempfile | 22 | import tempfile |
30 | import urllib.parse | ||
31 | import urllib.request | ||
32 | 23 | ||
33 | 24 | ||
34 | GoImport = namedtuple('GoImport', 'root vcs url suffix') | ||
35 | logger = logging.getLogger('recipetool') | 25 | logger = logging.getLogger('recipetool') |
36 | CodeRepo = namedtuple( | ||
37 | 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor') | ||
38 | 26 | ||
39 | tinfoil = None | 27 | tinfoil = None |
40 | 28 | ||
41 | # Regular expression to parse pseudo semantic version | ||
42 | # see https://go.dev/ref/mod#pseudo-versions | ||
43 | re_pseudo_semver = re.compile( | ||
44 | r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") | ||
45 | # Regular expression to parse semantic version | ||
46 | re_semver = re.compile( | ||
47 | r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") | ||
48 | |||
49 | 29 | ||
50 | def tinfoil_init(instance): | 30 | def tinfoil_init(instance): |
51 | global tinfoil | 31 | global tinfoil |
52 | tinfoil = instance | 32 | tinfoil = instance |
53 | 33 | ||
54 | 34 | ||
35 | |||
55 | class GoRecipeHandler(RecipeHandler): | 36 | class GoRecipeHandler(RecipeHandler): |
56 | """Class to handle the go recipe creation""" | 37 | """Class to handle the go recipe creation""" |
57 | 38 | ||
@@ -83,580 +64,6 @@ class GoRecipeHandler(RecipeHandler): | |||
83 | 64 | ||
84 | return bindir | 65 | return bindir |
85 | 66 | ||
86 | def __resolve_repository_static(self, modulepath): | ||
87 | """Resolve the repository in a static manner | ||
88 | |||
89 | The method is based on the go implementation of | ||
90 | `repoRootFromVCSPaths` in | ||
91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
92 | """ | ||
93 | |||
94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
95 | req = urllib.request.Request(url.geturl()) | ||
96 | |||
97 | try: | ||
98 | resp = urllib.request.urlopen(req) | ||
99 | # Some modulepath are just redirects to github (or some other vcs | ||
100 | # hoster). Therefore, we check if this modulepath redirects to | ||
101 | # somewhere else | ||
102 | if resp.geturl() != url.geturl(): | ||
103 | bb.debug(1, "%s is redirectred to %s" % | ||
104 | (url.geturl(), resp.geturl())) | ||
105 | url = urllib.parse.urlparse(resp.geturl()) | ||
106 | modulepath = url.netloc + url.path | ||
107 | |||
108 | except URLError as url_err: | ||
109 | # This is probably because the module path | ||
110 | # contains the subdir and major path. Thus, | ||
111 | # we ignore this error for now | ||
112 | logger.debug( | ||
113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
114 | |||
115 | host, _, _ = modulepath.partition('/') | ||
116 | |||
117 | class vcs(Enum): | ||
118 | pathprefix = "pathprefix" | ||
119 | regexp = "regexp" | ||
120 | type = "type" | ||
121 | repo = "repo" | ||
122 | check = "check" | ||
123 | schemelessRepo = "schemelessRepo" | ||
124 | |||
125 | # GitHub | ||
126 | vcsGitHub = {} | ||
127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
128 | vcsGitHub[vcs.regexp] = re.compile( | ||
129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
130 | vcsGitHub[vcs.type] = "git" | ||
131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
132 | |||
133 | # Bitbucket | ||
134 | vcsBitbucket = {} | ||
135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
138 | vcsBitbucket[vcs.type] = "git" | ||
139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
140 | |||
141 | # IBM DevOps Services (JazzHub) | ||
142 | vcsIBMDevOps = {} | ||
143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
146 | vcsIBMDevOps[vcs.type] = "git" | ||
147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
148 | |||
149 | # Git at Apache | ||
150 | vcsApacheGit = {} | ||
151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
154 | vcsApacheGit[vcs.type] = "git" | ||
155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
156 | |||
157 | # Git at OpenStack | ||
158 | vcsOpenStackGit = {} | ||
159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
162 | vcsOpenStackGit[vcs.type] = "git" | ||
163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
164 | |||
165 | # chiselapp.com for fossil | ||
166 | vcsChiselapp = {} | ||
167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
170 | vcsChiselapp[vcs.type] = "fossil" | ||
171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
172 | |||
173 | # General syntax for any server. | ||
174 | # Must be last. | ||
175 | vcsGeneralServer = {} | ||
176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
179 | |||
180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
182 | vcsGeneralServer] | ||
183 | |||
184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
185 | logger.warning("Suspicious module path %s" % modulepath) | ||
186 | return None | ||
187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
188 | logger.warning("Import path should not start with %s %s" % | ||
189 | ("http", "https")) | ||
190 | return None | ||
191 | |||
192 | rootpath = None | ||
193 | vcstype = None | ||
194 | repourl = None | ||
195 | suffix = None | ||
196 | |||
197 | for srv in vcsPaths: | ||
198 | m = srv[vcs.regexp].match(modulepath) | ||
199 | if vcs.pathprefix in srv: | ||
200 | if host == srv[vcs.pathprefix]: | ||
201 | rootpath = m.group('root') | ||
202 | vcstype = srv[vcs.type] | ||
203 | repourl = m.expand(srv[vcs.repo]) | ||
204 | suffix = m.group('suffix') | ||
205 | break | ||
206 | elif m and srv[vcs.schemelessRepo]: | ||
207 | rootpath = m.group('root') | ||
208 | vcstype = m[vcs.type] | ||
209 | repourl = m[vcs.repo] | ||
210 | suffix = m.group('suffix') | ||
211 | break | ||
212 | |||
213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
214 | |||
215 | def __resolve_repository_dynamic(self, modulepath): | ||
216 | """Resolve the repository root in a dynamic manner. | ||
217 | |||
218 | The method is based on the go implementation of | ||
219 | `repoRootForImportDynamic` in | ||
220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
221 | """ | ||
222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
223 | |||
224 | class GoImportHTMLParser(HTMLParser): | ||
225 | |||
226 | def __init__(self): | ||
227 | super().__init__() | ||
228 | self.__srv = [] | ||
229 | |||
230 | def handle_starttag(self, tag, attrs): | ||
231 | if tag == 'meta' and list( | ||
232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
233 | content = list( | ||
234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
235 | if content: | ||
236 | self.__srv = content[0][1].split() | ||
237 | |||
238 | @property | ||
239 | def import_prefix(self): | ||
240 | return self.__srv[0] if len(self.__srv) else None | ||
241 | |||
242 | @property | ||
243 | def vcs(self): | ||
244 | return self.__srv[1] if len(self.__srv) else None | ||
245 | |||
246 | @property | ||
247 | def repourl(self): | ||
248 | return self.__srv[2] if len(self.__srv) else None | ||
249 | |||
250 | url = url.geturl() + "?go-get=1" | ||
251 | req = urllib.request.Request(url) | ||
252 | |||
253 | try: | ||
254 | resp = urllib.request.urlopen(req) | ||
255 | |||
256 | except URLError as url_err: | ||
257 | logger.warning( | ||
258 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
259 | return None | ||
260 | |||
261 | parser = GoImportHTMLParser() | ||
262 | parser.feed(resp.read().decode('utf-8')) | ||
263 | parser.close() | ||
264 | |||
265 | return GoImport(parser.import_prefix, parser.vcs, parser.repourl, None) | ||
266 | |||
267 | def __resolve_from_golang_proxy(self, modulepath, version): | ||
268 | """ | ||
269 | Resolves repository data from golang proxy | ||
270 | """ | ||
271 | url = urllib.parse.urlparse("https://proxy.golang.org/" | ||
272 | + modulepath | ||
273 | + "/@v/" | ||
274 | + version | ||
275 | + ".info") | ||
276 | |||
277 | # Transform url to lower case, golang proxy doesn't like mixed case | ||
278 | req = urllib.request.Request(url.geturl().lower()) | ||
279 | |||
280 | try: | ||
281 | resp = urllib.request.urlopen(req) | ||
282 | except URLError as url_err: | ||
283 | logger.warning( | ||
284 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
285 | return None | ||
286 | |||
287 | golang_proxy_res = resp.read().decode('utf-8') | ||
288 | modinfo = json.loads(golang_proxy_res) | ||
289 | |||
290 | if modinfo and 'Origin' in modinfo: | ||
291 | origin = modinfo['Origin'] | ||
292 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
293 | |||
294 | # We normalize the repo URL since we don't want the scheme in it | ||
295 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
296 | _root, _, _ = self.__split_path_version(modulepath) | ||
297 | if _subdir: | ||
298 | _root = _root[:-len(_subdir)].strip('/') | ||
299 | |||
300 | _commit = origin['Hash'] | ||
301 | _vcs = origin['VCS'] | ||
302 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
303 | |||
304 | return None | ||
305 | |||
306 | def __resolve_repository(self, modulepath): | ||
307 | """ | ||
308 | Resolves src uri from go module-path | ||
309 | """ | ||
310 | repodata = self.__resolve_repository_static(modulepath) | ||
311 | if not repodata or not repodata.url: | ||
312 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
313 | if not repodata or not repodata.url: | ||
314 | logger.error( | ||
315 | "Could not resolve repository for module path '%s'" % modulepath) | ||
316 | # There is no way to recover from this | ||
317 | sys.exit(14) | ||
318 | if repodata: | ||
319 | logger.debug(1, "Resolved download path for import '%s' => %s" % ( | ||
320 | modulepath, repodata.url)) | ||
321 | return repodata | ||
322 | |||
323 | def __split_path_version(self, path): | ||
324 | i = len(path) | ||
325 | dot = False | ||
326 | for j in range(i, 0, -1): | ||
327 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
328 | break | ||
329 | if path[j - 1] == '.': | ||
330 | dot = True | ||
331 | break | ||
332 | i = j - 1 | ||
333 | |||
334 | if i <= 1 or i == len( | ||
335 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
336 | return path, "", True | ||
337 | |||
338 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
339 | if dot or len( | ||
340 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
341 | return path, "", False | ||
342 | |||
343 | return prefix, pathMajor, True | ||
344 | |||
345 | def __get_path_major(self, pathMajor): | ||
346 | if not pathMajor: | ||
347 | return "" | ||
348 | |||
349 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
350 | logger.error( | ||
351 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
352 | |||
353 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
354 | pathMajor = pathMajor[:len("-unstable") - 2] | ||
355 | |||
356 | return pathMajor[1:] | ||
357 | |||
358 | def __build_coderepo(self, repo, path): | ||
359 | codedir = "" | ||
360 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
361 | if repo.root == path: | ||
362 | pathprefix = path | ||
363 | elif path.startswith(repo.root): | ||
364 | codedir = pathprefix[len(repo.root):].strip('/') | ||
365 | |||
366 | pseudoMajor = self.__get_path_major(pathMajor) | ||
367 | |||
368 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
369 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
370 | |||
371 | return CodeRepo(path, repo.root, codedir, | ||
372 | pathMajor, pathprefix, pseudoMajor) | ||
373 | |||
374 | def __resolve_version(self, repo, path, version): | ||
375 | hash = None | ||
376 | coderoot = self.__build_coderepo(repo, path) | ||
377 | |||
378 | def vcs_fetch_all(): | ||
379 | tmpdir = tempfile.mkdtemp() | ||
380 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
381 | bb.process.run(clone_cmd) | ||
382 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
383 | output, _ = bb.process.run( | ||
384 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
385 | bb.utils.prunedir(tmpdir) | ||
386 | return output.strip().split('\n') | ||
387 | |||
388 | def vcs_fetch_remote(tag): | ||
389 | # add * to grab ^{} | ||
390 | refs = {} | ||
391 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
392 | repo.url, tag) | ||
393 | output, _ = bb.process.run(ls_remote_cmd) | ||
394 | output = output.strip().split('\n') | ||
395 | for line in output: | ||
396 | f = line.split(maxsplit=1) | ||
397 | if len(f) != 2: | ||
398 | continue | ||
399 | |||
400 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
401 | if f[1].startswith(prefix): | ||
402 | refs[f[1][len(prefix):]] = f[0] | ||
403 | |||
404 | for key, hash in refs.items(): | ||
405 | if key.endswith(r"^{}"): | ||
406 | refs[key.strip(r"^{}")] = hash | ||
407 | |||
408 | return refs[tag] | ||
409 | |||
410 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
411 | |||
412 | if m_pseudo_semver: | ||
413 | remote_refs = vcs_fetch_all() | ||
414 | short_commit = m_pseudo_semver.group('commithash') | ||
415 | for l in remote_refs: | ||
416 | r = l.split(maxsplit=1) | ||
417 | sha1 = r[0] if len(r) else None | ||
418 | if not sha1: | ||
419 | logger.error( | ||
420 | "Ups: could not resolve abbref commit for %s" % short_commit) | ||
421 | |||
422 | elif sha1.startswith(short_commit): | ||
423 | hash = sha1 | ||
424 | break | ||
425 | else: | ||
426 | m_semver = re_semver.match(version) | ||
427 | if m_semver: | ||
428 | |||
429 | def get_sha1_remote(re): | ||
430 | rsha1 = None | ||
431 | for line in remote_refs: | ||
432 | # Split lines of the following format: | ||
433 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
434 | lineparts = line.split(maxsplit=1) | ||
435 | sha1 = lineparts[0] if len(lineparts) else None | ||
436 | refstring = lineparts[1] if len( | ||
437 | lineparts) == 2 else None | ||
438 | if refstring: | ||
439 | # Normalize tag string and split in case of multiple | ||
440 | # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) | ||
441 | refs = refstring.strip('(), ').split(',') | ||
442 | for ref in refs: | ||
443 | if re.match(ref.strip()): | ||
444 | rsha1 = sha1 | ||
445 | return rsha1 | ||
446 | |||
447 | semver = "v" + m_semver.group('major') + "."\ | ||
448 | + m_semver.group('minor') + "."\ | ||
449 | + m_semver.group('patch') \ | ||
450 | + (("-" + m_semver.group('prerelease')) | ||
451 | if m_semver.group('prerelease') else "") | ||
452 | |||
453 | tag = os.path.join( | ||
454 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
455 | |||
456 | # probe tag using 'ls-remote', which is faster than fetching | ||
457 | # complete history | ||
458 | hash = vcs_fetch_remote(tag) | ||
459 | if not hash: | ||
460 | # backup: fetch complete history | ||
461 | remote_refs = vcs_fetch_all() | ||
462 | hash = get_sha1_remote( | ||
463 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
464 | |||
465 | logger.debug( | ||
466 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
467 | return hash | ||
468 | |||
469 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
470 | """Generate SRC_URI functions for go imports""" | ||
471 | |||
472 | logger.info("Resolving repository for module %s", path) | ||
473 | # First try to resolve repo and commit from golang proxy | ||
474 | # Most info is already there and we don't have to go through the | ||
475 | # repository or even perform the version resolve magic | ||
476 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
477 | if golang_proxy_info: | ||
478 | repo = golang_proxy_info[0] | ||
479 | commit = golang_proxy_info[1] | ||
480 | else: | ||
481 | # Fallback | ||
482 | # Resolve repository by 'hand' | ||
483 | repo = self.__resolve_repository(path) | ||
484 | commit = self.__resolve_version(repo, path, version) | ||
485 | |||
486 | url = urllib.parse.urlparse(repo.url) | ||
487 | repo_url = url.netloc + url.path | ||
488 | |||
489 | coderoot = self.__build_coderepo(repo, path) | ||
490 | |||
491 | inline_fcn = "${@go_src_uri(" | ||
492 | inline_fcn += f"'{repo_url}','{version}'" | ||
493 | if repo_url != path: | ||
494 | inline_fcn += f",path='{path}'" | ||
495 | if coderoot.codeDir: | ||
496 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
497 | if repo.vcs != 'git': | ||
498 | inline_fcn += f",vcs='{repo.vcs}'" | ||
499 | if replaces: | ||
500 | inline_fcn += f",replaces='{replaces}'" | ||
501 | if coderoot.pathMajor: | ||
502 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
503 | inline_fcn += ")}" | ||
504 | |||
505 | return inline_fcn, commit | ||
506 | |||
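As a sketch (not part of the patch) of the inline function string assembled above, using a made-up module whose import path carries a /v2 suffix:

    repo_url = "github.com/example/project"
    version = "v2.1.0"
    path = "github.com/example/project/v2"
    inline_fcn = "${@go_src_uri(" + f"'{repo_url}','{version}'"
    if repo_url != path:
        inline_fcn += f",path='{path}'"
    inline_fcn += ")}"
    # -> ${@go_src_uri('github.com/example/project','v2.1.0',path='github.com/example/project/v2')}
    print(inline_fcn)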
507 | def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
508 | |||
509 | import re | ||
510 | src_uris = [] | ||
511 | src_revs = [] | ||
512 | |||
513 | def generate_src_rev(path, version, commithash): | ||
514 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
515 | # Oops... maybe someone manipulated the source repository and the | ||
516 | # version or commit could not be resolved. This is a sign of | ||
517 | # a) the supply chain was manipulated (bad) | ||
518 | # b) the implementation for the version resolving didn't work | ||
519 | # anymore (less bad) | ||
520 | if not commithash: | ||
521 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
522 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
523 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
524 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
525 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
526 | |||
527 | return src_rev | ||
528 | |||
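A minimal sketch (not part of the patch) of the SRCREV naming scheme used by generate_src_rev(); the module path, version and hash are made up:

    path = "github.com/example/mod"
    version = "v1.2.3"
    commithash = "0123456789abcdef0123456789abcdef01234567"
    # generate_src_rev(path, version, commithash) would yield roughly:
    #   # github.com/example/mod@v1.2.3 => 0123456789abcdef0123456789abcdef01234567
    #   SRCREV_github.com.example.mod = "0123456789abcdef0123456789abcdef01234567"
    print(f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"")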
529 | # we first go over the replacement list, because we are essentially | ||
530 | # interested only in the replaced path | ||
531 | if go_mod['Replace']: | ||
532 | for replacement in go_mod['Replace']: | ||
533 | oldpath = replacement['Old']['Path'] | ||
534 | path = replacement['New']['Path'] | ||
535 | version = '' | ||
536 | if 'Version' in replacement['New']: | ||
537 | version = replacement['New']['Version'] | ||
538 | |||
539 | if os.path.exists(os.path.join(srctree, path)): | ||
540 | # the module refers to the local path, remove it from the requirement list | ||
541 | # because it's a local module | ||
542 | go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath] | ||
543 | else: | ||
544 | # Replace the path and the version, so we don't iterate the replacement list anymore | ||
545 | for require in go_mod['Require']: | ||
546 | if require['Path'] == oldpath: | ||
547 | require.update({'Path': path, 'Version': version}) | ||
548 | break | ||
549 | |||
550 | for require in go_mod['Require']: | ||
551 | path = require['Path'] | ||
552 | version = require['Version'] | ||
553 | |||
554 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
555 | path, version) | ||
556 | src_uris.append(inline_fcn) | ||
557 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
558 | |||
559 | # strip version part from module URL /vXX | ||
560 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
561 | pn, _ = determine_from_url(baseurl) | ||
562 | go_mods_basename = "%s-modules.inc" % pn | ||
563 | |||
564 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | ||
565 | with open(go_mods_filename, "w") as f: | ||
566 | # We introduce this indirection to make the tests a little easier | ||
567 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | ||
568 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | ||
569 | for uri in src_uris: | ||
570 | f.write(" " + uri + " \\\n") | ||
571 | f.write("\"\n\n") | ||
572 | for rev in src_revs: | ||
573 | f.write(rev + "\n") | ||
574 | |||
575 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
576 | |||
577 | def __go_run_cmd(self, cmd, cwd, d): | ||
578 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
579 | shell=True, cwd=cwd) | ||
580 | |||
581 | def __go_native_version(self, d): | ||
582 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
583 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
584 | major = int(m.group(2)) | ||
585 | minor = int(m.group(3)) | ||
586 | patch = int(m.group(4)) | ||
587 | |||
588 | return major, minor, patch | ||
589 | |||
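A quick sketch (not part of the patch) of what the version regex above extracts from a typical 'go version' line; the output string is an assumed example:

    import re
    stdout = "go version go1.22.3 linux/amd64"
    m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
    print(int(m.group(2)), int(m.group(3)), int(m.group(4)))   # 1 22 3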
590 | def __go_mod_patch(self, srctree, localfilesdir, extravalues, d): | ||
591 | |||
592 | patchfilename = "go.mod.patch" | ||
593 | go_native_version_major, go_native_version_minor, _ = self.__go_native_version( | ||
594 | d) | ||
595 | self.__go_run_cmd("go mod tidy -go=%d.%d" % | ||
596 | (go_native_version_major, go_native_version_minor), srctree, d) | ||
597 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
598 | |||
599 | # Create patch in order to upgrade go version | ||
600 | self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d) | ||
601 | # Restore original state | ||
602 | self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d) | ||
603 | |||
604 | go_mod = json.loads(stdout) | ||
605 | tmpfile = os.path.join(localfilesdir, patchfilename) | ||
606 | shutil.move(os.path.join(srctree, patchfilename), tmpfile) | ||
607 | |||
608 | extravalues['extrafiles'][patchfilename] = tmpfile | ||
609 | |||
610 | return go_mod, patchfilename | ||
611 | |||
612 | def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
613 | # Perform vendoring to retrieve the correct modules.txt | ||
614 | tmp_vendor_dir = tempfile.mkdtemp() | ||
615 | |||
616 | # -v causes go to print modules.txt to stderr | ||
617 | _, stderr = self.__go_run_cmd( | ||
618 | "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d) | ||
619 | |||
620 | modules_txt_basename = "modules.txt" | ||
621 | modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename) | ||
622 | with open(modules_txt_filename, "w") as f: | ||
623 | f.write(stderr) | ||
624 | |||
625 | extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename | ||
626 | |||
627 | licenses = [] | ||
628 | lic_files_chksum = [] | ||
629 | licvalues = guess_license(tmp_vendor_dir, d) | ||
630 | shutil.rmtree(tmp_vendor_dir) | ||
631 | |||
632 | if licvalues: | ||
633 | for licvalue in licvalues: | ||
634 | license = licvalue[0] | ||
635 | lics = tidy_licenses(fixup_license(license)) | ||
636 | lics = [lic for lic in lics if lic not in licenses] | ||
637 | if len(lics): | ||
638 | licenses.extend(lics) | ||
639 | lic_files_chksum.append( | ||
640 | 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
641 | |||
642 | # strip version part from module URL /vXX | ||
643 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
644 | pn, _ = determine_from_url(baseurl) | ||
645 | licenses_basename = "%s-licenses.inc" % pn | ||
646 | |||
647 | licenses_filename = os.path.join(localfilesdir, licenses_basename) | ||
648 | with open(licenses_filename, "w") as f: | ||
649 | f.write("GO_MOD_LICENSES = \"%s\"\n\n" % | ||
650 | ' & '.join(sorted(licenses, key=str.casefold))) | ||
651 | # We introduce this indirection to make the tests a little easier | ||
652 | f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n") | ||
653 | f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n") | ||
654 | for lic in lic_files_chksum: | ||
655 | f.write(" " + lic + " \\\n") | ||
656 | f.write("\"\n") | ||
657 | |||
658 | extravalues['extrafiles'][licenses_basename] = licenses_filename | ||
659 | |||
660 | def process(self, srctree, classes, lines_before, | 67 | def process(self, srctree, classes, lines_before, |
661 | lines_after, handled, extravalues): | 68 | lines_after, handled, extravalues): |
662 | 69 | ||
@@ -667,63 +74,52 @@ class GoRecipeHandler(RecipeHandler): | |||
667 | if not files: | 74 | if not files: |
668 | return False | 75 | return False |
669 | 76 | ||
670 | d = bb.data.createCopy(tinfoil.config_data) | ||
671 | go_bindir = self.__ensure_go() | 77 | go_bindir = self.__ensure_go() |
672 | if not go_bindir: | 78 | if not go_bindir: |
673 | sys.exit(14) | 79 | sys.exit(14) |
674 | 80 | ||
675 | d.prependVar('PATH', '%s:' % go_bindir) | ||
676 | handled.append('buildsystem') | 81 | handled.append('buildsystem') |
677 | classes.append("go-vendor") | 82 | classes.append("go-mod") |
678 | 83 | ||
679 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | 84 | # Use go-mod-update-modules to set the full SRC_URI and LICENSE |
85 | classes.append("go-mod-update-modules") | ||
86 | extravalues["run_tasks"] = "update_modules" | ||
680 | 87 | ||
681 | go_mod = json.loads(stdout) | 88 | with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir: |
682 | go_import = go_mod['Module']['Path'] | 89 | env = dict(os.environ) |
683 | go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) | 90 | env["PATH"] += f":{go_bindir}" |
684 | go_version_major = int(go_version_match.group(1)) | 91 | env['GOMODCACHE'] = tmp_mod_dir |
685 | go_version_minor = int(go_version_match.group(2)) | ||
686 | src_uris = [] | ||
687 | 92 | ||
688 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | 93 | stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True) |
689 | extravalues.setdefault('extrafiles', {}) | 94 | go_mod = json.loads(stdout) |
95 | go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path']) | ||
690 | 96 | ||
691 | # Use an explicit name determined from the module name because it | 97 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') |
692 | # might differ from the actual URL for replaced modules | 98 | extravalues.setdefault('extrafiles', {}) |
693 | # strip version part from module URL /vXX | ||
694 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
695 | pn, _ = determine_from_url(baseurl) | ||
696 | 99 | ||
697 | # go.mod files with version < 1.17 may not include all indirect | 100 | # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files |
698 | # dependencies. Thus, we have to upgrade the go version. | 101 | basename = "{pn}-licenses.inc" |
699 | if go_version_major == 1 and go_version_minor < 17: | 102 | filename = os.path.join(localfilesdir, basename) |
700 | logger.warning( | 103 | with open(filename, "w") as f: |
701 | "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") | 104 | f.write("# FROM RECIPETOOL\n") |
702 | go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, | 105 | extravalues['extrafiles'][f"../{basename}"] = filename |
703 | extravalues, d) | ||
704 | src_uris.append( | ||
705 | "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename)) | ||
706 | 106 | ||
707 | # Check whether the module is vendored. If so, we have nothing to do. | 107 | basename = "{pn}-go-mods.inc" |
708 | # Otherwise we gather all dependencies and add them to the recipe | 108 | filename = os.path.join(localfilesdir, basename) |
709 | if not os.path.exists(os.path.join(srctree, "vendor")): | 109 | with open(filename, "w") as f: |
110 | f.write("# FROM RECIPETOOL\n") | ||
111 | extravalues['extrafiles'][f"../{basename}"] = filename | ||
710 | 112 | ||
711 | # Write additional $BPN-modules.inc file | 113 | # Do generic license handling |
712 | self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) | 114 | d = bb.data.createCopy(tinfoil.config_data) |
713 | lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") | 115 | handle_license_vars(srctree, lines_before, handled, extravalues, d) |
714 | lines_before.append("require %s-licenses.inc" % (pn)) | 116 | self.__rewrite_lic_vars(lines_before) |
715 | 117 | ||
716 | self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) | 118 | self.__rewrite_src_uri(lines_before) |
717 | 119 | ||
718 | self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) | 120 | lines_before.append('require ${BPN}-licenses.inc') |
719 | lines_before.append("require %s-modules.inc" % (pn)) | 121 | lines_before.append('require ${BPN}-go-mods.inc') |
720 | 122 | lines_before.append(f'GO_IMPORT = "{go_import}"') | |
721 | # Do generic license handling | ||
722 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | ||
723 | self.__rewrite_lic_uri(lines_before) | ||
724 | |||
725 | lines_before.append("GO_IMPORT = \"{}\"".format(baseurl)) | ||
726 | lines_before.append("SRCREV_FORMAT = \"${BPN}\"") | ||
727 | 123 | ||
728 | def __update_lines_before(self, updated, newlines, lines_before): | 124 | def __update_lines_before(self, updated, newlines, lines_before): |
729 | if updated: | 125 | if updated: |
@@ -735,9 +131,9 @@ class GoRecipeHandler(RecipeHandler): | |||
735 | lines_before.append(line) | 131 | lines_before.append(line) |
736 | return updated | 132 | return updated |
737 | 133 | ||
738 | def __rewrite_lic_uri(self, lines_before): | 134 | def __rewrite_lic_vars(self, lines_before): |
739 | |||
740 | def varfunc(varname, origvalue, op, newlines): | 135 | def varfunc(varname, origvalue, op, newlines): |
136 | import urllib.parse | ||
741 | if varname == 'LIC_FILES_CHKSUM': | 137 | if varname == 'LIC_FILES_CHKSUM': |
742 | new_licenses = [] | 138 | new_licenses = [] |
743 | licenses = origvalue.split('\\') | 139 | licenses = origvalue.split('\\') |
@@ -762,12 +158,11 @@ class GoRecipeHandler(RecipeHandler): | |||
762 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | 158 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) |
763 | return self.__update_lines_before(updated, newlines, lines_before) | 159 | return self.__update_lines_before(updated, newlines, lines_before) |
764 | 160 | ||
765 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | 161 | def __rewrite_src_uri(self, lines_before): |
766 | 162 | ||
767 | def varfunc(varname, origvalue, op, newlines): | 163 | def varfunc(varname, origvalue, op, newlines): |
768 | if varname == 'SRC_URI': | 164 | if varname == 'SRC_URI': |
769 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | 165 | src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'] |
770 | src_uri.extend(additional_uris) | ||
771 | return src_uri, None, -1, True | 166 | return src_uri, None, -1, True |
772 | return origvalue, None, 0, True | 167 | return origvalue, None, 0, True |
773 | 168 | ||
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py index 113a89f6a6..8c4cdd5234 100644 --- a/scripts/lib/recipetool/create_npm.py +++ b/scripts/lib/recipetool/create_npm.py | |||
@@ -15,9 +15,9 @@ import bb | |||
15 | from bb.fetch2.npm import NpmEnvironment | 15 | from bb.fetch2.npm import NpmEnvironment |
16 | from bb.fetch2.npm import npm_package | 16 | from bb.fetch2.npm import npm_package |
17 | from bb.fetch2.npmsw import foreach_dependencies | 17 | from bb.fetch2.npmsw import foreach_dependencies |
18 | from oe.license_finder import match_licenses, find_license_files | ||
18 | from recipetool.create import RecipeHandler | 19 | from recipetool.create import RecipeHandler |
19 | from recipetool.create import get_license_md5sums | 20 | from recipetool.create import generate_common_licenses_chksums |
20 | from recipetool.create import guess_license | ||
21 | from recipetool.create import split_pkg_licenses | 21 | from recipetool.create import split_pkg_licenses |
22 | logger = logging.getLogger('recipetool') | 22 | logger = logging.getLogger('recipetool') |
23 | 23 | ||
@@ -112,40 +112,54 @@ class NpmRecipeHandler(RecipeHandler): | |||
112 | """Return the extra license files and the list of packages""" | 112 | """Return the extra license files and the list of packages""" |
113 | licfiles = [] | 113 | licfiles = [] |
114 | packages = {} | 114 | packages = {} |
115 | # Licenses from package.json will point to COMMON_LICENSE_DIR so we need | ||
116 | # to associate them explicitly to packages for split_pkg_licenses() | ||
117 | fallback_licenses = dict() | ||
118 | |||
119 | def _find_package_licenses(destdir): | ||
120 | """Either find license files, or use package.json metadata""" | ||
121 | def _get_licenses_from_package_json(package_json): | ||
122 | with open(os.path.join(srctree, package_json), "r") as f: | ||
123 | data = json.load(f) | ||
124 | if "license" in data: | ||
125 | licenses = data["license"].split(" ") | ||
126 | licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"] | ||
127 | return [], licenses | ||
128 | else: | ||
129 | return [package_json], None | ||
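A small sketch (not part of the patch) of how the license string handling above turns an SPDX expression from package.json into a license list; the expression is a made-up example:

    data = {"license": "(MIT OR Apache-2.0)"}
    licenses = data["license"].split(" ")
    licenses = [lic.strip("()") for lic in licenses if lic not in ("OR", "AND")]
    print(licenses)   # ['MIT', 'Apache-2.0'] -> used as fallback licenses for the package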
115 | 130 | ||
116 | # Handle the parent package | ||
117 | packages["${PN}"] = "" | ||
118 | |||
119 | def _licfiles_append_fallback_readme_files(destdir): | ||
120 | """Append README files as a fallback to license files if a license file is missing""" | ||
121 | |||
122 | fallback = True | ||
123 | readmes = [] | ||
124 | basedir = os.path.join(srctree, destdir) | 131 | basedir = os.path.join(srctree, destdir) |
125 | for fn in os.listdir(basedir): | 132 | licfiles = find_license_files(basedir) |
126 | upper = fn.upper() | 133 | if len(licfiles) > 0: |
127 | if upper.startswith("README"): | 134 | return licfiles, None |
128 | fullpath = os.path.join(basedir, fn) | 135 | else: |
129 | readmes.append(fullpath) | 136 | # A license wasn't found in the package directory, so we'll use the package.json metadata |
130 | if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: | 137 | pkg_json = os.path.join(basedir, "package.json") |
131 | fallback = False | 138 | return _get_licenses_from_package_json(pkg_json) |
132 | if fallback: | 139 | |
133 | for readme in readmes: | 140 | def _get_package_licenses(destdir, package): |
134 | licfiles.append(os.path.relpath(readme, srctree)) | 141 | (package_licfiles, package_licenses) = _find_package_licenses(destdir) |
142 | if package_licfiles: | ||
143 | licfiles.extend(package_licfiles) | ||
144 | else: | ||
145 | fallback_licenses[package] = package_licenses | ||
135 | 146 | ||
136 | # Handle the dependencies | 147 | # Handle the dependencies |
137 | def _handle_dependency(name, params, destdir): | 148 | def _handle_dependency(name, params, destdir): |
138 | deptree = destdir.split('node_modules/') | 149 | deptree = destdir.split('node_modules/') |
139 | suffix = "-".join([npm_package(dep) for dep in deptree]) | 150 | suffix = "-".join([npm_package(dep) for dep in deptree]) |
140 | packages["${PN}" + suffix] = destdir | 151 | packages["${PN}" + suffix] = destdir |
141 | _licfiles_append_fallback_readme_files(destdir) | 152 | _get_package_licenses(destdir, "${PN}" + suffix) |
142 | 153 | ||
143 | with open(shrinkwrap_file, "r") as f: | 154 | with open(shrinkwrap_file, "r") as f: |
144 | shrinkwrap = json.load(f) | 155 | shrinkwrap = json.load(f) |
145 | |||
146 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) | 156 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) |
147 | 157 | ||
148 | return licfiles, packages | 158 | # Handle the parent package |
159 | packages["${PN}"] = "" | ||
160 | _get_package_licenses(srctree, "${PN}") | ||
161 | |||
162 | return licfiles, packages, fallback_licenses | ||
149 | 163 | ||
150 | # Handle the peer dependencies | 164 | # Handle the peer dependencies |
151 | def _handle_peer_dependency(self, shrinkwrap_file): | 165 | def _handle_peer_dependency(self, shrinkwrap_file): |
@@ -266,36 +280,12 @@ class NpmRecipeHandler(RecipeHandler): | |||
266 | fetcher.unpack(srctree) | 280 | fetcher.unpack(srctree) |
267 | 281 | ||
268 | bb.note("Handling licences ...") | 282 | bb.note("Handling licences ...") |
269 | (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) | 283 | (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev) |
270 | 284 | licvalues = match_licenses(licfiles, srctree, d) | |
271 | def _guess_odd_license(licfiles): | 285 | split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses) |
272 | import bb | 286 | fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist] |
273 | 287 | extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d) | |
274 | md5sums = get_license_md5sums(d, linenumbers=True) | 288 | extravalues["LICENSE"] = fallback_licenses_flat |
275 | |||
276 | chksums = [] | ||
277 | licenses = [] | ||
278 | for licfile in licfiles: | ||
279 | f = os.path.join(srctree, licfile) | ||
280 | md5value = bb.utils.md5_file(f) | ||
281 | (license, beginline, endline, md5) = md5sums.get(md5value, | ||
282 | (None, "", "", "")) | ||
283 | if not license: | ||
284 | license = "Unknown" | ||
285 | logger.info("Please add the following line for '%s' to a " | ||
286 | "'lib/recipetool/licenses.csv' and replace `Unknown`, " | ||
287 | "`X`, `Y` and `MD5` with the license, begin line, " | ||
288 | "end line and partial MD5 checksum:\n" \ | ||
289 | "%s,Unknown,X,Y,MD5" % (licfile, md5value)) | ||
290 | chksums.append("file://%s%s%s;md5=%s" % (licfile, | ||
291 | ";beginline=%s" % (beginline) if beginline else "", | ||
292 | ";endline=%s" % (endline) if endline else "", | ||
293 | md5 if md5 else md5value)) | ||
294 | licenses.append((license, licfile, md5value)) | ||
295 | return (licenses, chksums) | ||
296 | |||
297 | (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles) | ||
298 | split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after) | ||
299 | 289 | ||
300 | classes.append("npm") | 290 | classes.append("npm") |
301 | handled.append("buildsystem") | 291 | handled.append("buildsystem") |
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv deleted file mode 100644 index 80851111b3..0000000000 --- a/scripts/lib/recipetool/licenses.csv +++ /dev/null | |||
@@ -1,37 +0,0 @@ | |||
1 | 0636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only | ||
2 | 12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only | ||
3 | 18810669f13b87348459e611d31ab760,GPL-2.0-only | ||
4 | 252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only | ||
5 | 2d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only | ||
6 | 3214f080875748938ba060314b4f727d,LGPL-2.0-only | ||
7 | 385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only | ||
8 | 393a5ca445f6965873eca0259a17f833,GPL-2.0-only | ||
9 | 3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0 | ||
10 | 3bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only | ||
11 | 4325afd396febcb659c36b49533135d4,GPL-2.0-only | ||
12 | 4fbd65380cdd255951079008b364516c,LGPL-2.1-only | ||
13 | 54c7042be62e169199200bc6477f04d1,BSD-3-Clause | ||
14 | 55ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only | ||
15 | 59530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only | ||
16 | 5f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only | ||
17 | 6a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only | ||
18 | 751419260aa954499f7abaabaa882bbe,GPL-2.0-only | ||
19 | 7fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only | ||
20 | 8ca43cbc842c2336e835926c2166c28b,GPL-2.0-only | ||
21 | 94d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only | ||
22 | 9ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only | ||
23 | 9f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only | ||
24 | a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only | ||
25 | b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only | ||
26 | bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only | ||
27 | bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only | ||
28 | c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only | ||
29 | d32239bcb673463ab874e80d47fae504,GPL-3.0-only | ||
30 | d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only | ||
31 | d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only | ||
32 | db979804f025cf55aabec7129cb671ed,LGPL-2.0-only | ||
33 | eb723b61539feef013de476e68b5c50a,GPL-2.0-only | ||
34 | ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only | ||
35 | f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only | ||
36 | fad9b3332be894bab9bc501572864b29,LGPL-2.1-only | ||
37 | fbc093901857fcd118f065f900982c24,LGPL-2.1-only | ||
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py new file mode 100644 index 0000000000..c7a53dc550 --- /dev/null +++ b/scripts/lib/resulttool/junit.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # resulttool - report test results in JUnit XML format | ||
2 | # | ||
3 | # Copyright (c) 2024, Siemens AG. | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | import os | ||
9 | import re | ||
10 | import xml.etree.ElementTree as ET | ||
11 | import resulttool.resultutils as resultutils | ||
12 | |||
13 | def junit(args, logger): | ||
14 | testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map) | ||
15 | |||
16 | total_time = 0 | ||
17 | skipped = 0 | ||
18 | failures = 0 | ||
19 | errors = 0 | ||
20 | |||
21 | for tests in testresults.values(): | ||
22 | results = tests[next(reversed(tests))].get("result", {}) | ||
23 | |||
24 | for result_id, result in results.items(): | ||
25 | # filter out ptestresult.rawlogs and ptestresult.sections | ||
26 | if re.search(r'\.test_', result_id): | ||
27 | total_time += result.get("duration", 0) | ||
28 | |||
29 | if result['status'] == "FAILED": | ||
30 | failures += 1 | ||
31 | elif result['status'] == "ERROR": | ||
32 | errors += 1 | ||
33 | elif result['status'] == "SKIPPED": | ||
34 | skipped += 1 | ||
35 | |||
36 | testsuites_node = ET.Element("testsuites") | ||
37 | testsuites_node.set("time", "%s" % total_time) | ||
38 | testsuite_node = ET.SubElement(testsuites_node, "testsuite") | ||
39 | testsuite_node.set("name", "Testimage") | ||
40 | testsuite_node.set("time", "%s" % total_time) | ||
41 | testsuite_node.set("tests", "%s" % len(results)) | ||
42 | testsuite_node.set("failures", "%s" % failures) | ||
43 | testsuite_node.set("errors", "%s" % errors) | ||
44 | testsuite_node.set("skipped", "%s" % skipped) | ||
45 | |||
46 | for result_id, result in results.items(): | ||
47 | if re.search(r'\.test_', result_id): | ||
48 | testcase_node = ET.SubElement(testsuite_node, "testcase", { | ||
49 | "name": result_id, | ||
50 | "classname": "Testimage", | ||
51 | "time": str(result['duration']) | ||
52 | }) | ||
53 | if result['status'] == "SKIPPED": | ||
54 | ET.SubElement(testcase_node, "skipped", message=result['log']) | ||
55 | elif result['status'] == "FAILED": | ||
56 | ET.SubElement(testcase_node, "failure", message=result['log']) | ||
57 | elif result['status'] == "ERROR": | ||
58 | ET.SubElement(testcase_node, "error", message=result['log']) | ||
59 | |||
60 | tree = ET.ElementTree(testsuites_node) | ||
61 | |||
62 | if args.junit_xml_path is None: | ||
63 | args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml' | ||
64 | tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True) | ||
65 | |||
66 | logger.info('Saved JUnit XML report as %s' % args.junit_xml_path) | ||
67 | |||
68 | def register_commands(subparsers): | ||
69 | """Register subcommands from this plugin""" | ||
70 | parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format', | ||
71 | description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.', | ||
72 | group='analysis') | ||
73 | parser_build.set_defaults(func=junit) | ||
74 | parser_build.add_argument('json_file', | ||
75 | help='json file should point to the testresults.json') | ||
76 | parser_build.add_argument('-j', '--junit_xml_path', | ||
77 | help='junit xml path allows setting the path of the generated test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml') | ||
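For reference, a hedged sketch of the XML shape the junit command above produces; the test names and timings here are invented:

    import xml.etree.ElementTree as ET
    testsuites = ET.Element("testsuites", time="12.5")
    testsuite = ET.SubElement(testsuites, "testsuite", name="Testimage", time="12.5",
                              tests="2", failures="1", errors="0", skipped="0")
    ET.SubElement(testsuite, "testcase", name="ping.PingTest.test_ping",
                  classname="Testimage", time="0.1")
    failed = ET.SubElement(testsuite, "testcase", name="ssh.SSHTest.test_ssh",
                           classname="Testimage", time="12.4")
    ET.SubElement(failed, "failure", message="ssh test failed")
    print(ET.tostring(testsuites, encoding="unicode"))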
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py index ecb27c5933..ae0861ac6b 100755 --- a/scripts/lib/resulttool/manualexecution.py +++ b/scripts/lib/resulttool/manualexecution.py | |||
@@ -22,7 +22,7 @@ def load_json_file(f): | |||
22 | def write_json_file(f, json_data): | 22 | def write_json_file(f, json_data): |
23 | os.makedirs(os.path.dirname(f), exist_ok=True) | 23 | os.makedirs(os.path.dirname(f), exist_ok=True) |
24 | with open(f, 'w') as filedata: | 24 | with open(f, 'w') as filedata: |
25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) | 25 | filedata.write(json.dumps(json_data, sort_keys=True, indent=1)) |
26 | 26 | ||
27 | class ManualTestRunner(object): | 27 | class ManualTestRunner(object): |
28 | 28 | ||
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py index 10e7d13841..33b3119c54 100644 --- a/scripts/lib/resulttool/regression.py +++ b/scripts/lib/resulttool/regression.py | |||
@@ -212,6 +212,8 @@ def compare_result(logger, base_name, target_name, base_result, target_result, d | |||
212 | 212 | ||
213 | if base_result and target_result: | 213 | if base_result and target_result: |
214 | for k in base_result: | 214 | for k in base_result: |
215 | if k in ['ptestresult.rawlogs', 'ptestresult.sections']: | ||
216 | continue | ||
215 | base_testcase = base_result[k] | 217 | base_testcase = base_result[k] |
216 | base_status = base_testcase.get('status') | 218 | base_status = base_testcase.get('status') |
217 | if base_status: | 219 | if base_status: |
@@ -422,6 +424,7 @@ def register_commands(subparsers): | |||
422 | help='(optional) filter the base results to this result ID') | 424 | help='(optional) filter the base results to this result ID') |
423 | parser_build.add_argument('-t', '--target-result-id', default='', | 425 | parser_build.add_argument('-t', '--target-result-id', default='', |
424 | help='(optional) filter the target results to this result ID') | 426 | help='(optional) filter the target results to this result ID') |
427 | parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes") | ||
425 | 428 | ||
426 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', | 429 | parser_build = subparsers.add_parser('regression-git', help='regression git analysis', |
427 | description='regression analysis comparing base result set to target ' | 430 | description='regression analysis comparing base result set to target ' |
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py index a349510ab8..1c100b00ab 100644 --- a/scripts/lib/resulttool/report.py +++ b/scripts/lib/resulttool/report.py | |||
@@ -256,7 +256,7 @@ class ResultsTextReport(object): | |||
256 | if selected_test_case_only: | 256 | if selected_test_case_only: |
257 | print_selected_testcase_result(raw_results, selected_test_case_only) | 257 | print_selected_testcase_result(raw_results, selected_test_case_only) |
258 | else: | 258 | else: |
259 | print(json.dumps(raw_results, sort_keys=True, indent=4)) | 259 | print(json.dumps(raw_results, sort_keys=True, indent=1)) |
260 | else: | 260 | else: |
261 | print('Could not find raw test result for %s' % raw_test) | 261 | print('Could not find raw test result for %s' % raw_test) |
262 | return 0 | 262 | return 0 |
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py index c5521d81bd..b8fc79a6ac 100644 --- a/scripts/lib/resulttool/resultutils.py +++ b/scripts/lib/resulttool/resultutils.py | |||
@@ -14,8 +14,11 @@ import scriptpath | |||
14 | import copy | 14 | import copy |
15 | import urllib.request | 15 | import urllib.request |
16 | import posixpath | 16 | import posixpath |
17 | import logging | ||
17 | scriptpath.add_oe_lib_path() | 18 | scriptpath.add_oe_lib_path() |
18 | 19 | ||
20 | logger = logging.getLogger('resulttool') | ||
21 | |||
19 | flatten_map = { | 22 | flatten_map = { |
20 | "oeselftest": [], | 23 | "oeselftest": [], |
21 | "runtime": [], | 24 | "runtime": [], |
@@ -31,13 +34,19 @@ regression_map = { | |||
31 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] | 34 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] |
32 | } | 35 | } |
33 | store_map = { | 36 | store_map = { |
34 | "oeselftest": ['TEST_TYPE'], | 37 | "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'], |
35 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], | 38 | "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], |
36 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 39 | "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
37 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], | 40 | "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], |
38 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] | 41 | "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] |
39 | } | 42 | } |
40 | 43 | ||
44 | rawlog_sections = { | ||
45 | "ptestresult.rawlogs": "ptest", | ||
46 | "ltpresult.rawlogs": "ltp", | ||
47 | "ltpposixresult.rawlogs": "ltpposix" | ||
48 | } | ||
49 | |||
41 | def is_url(p): | 50 | def is_url(p): |
42 | """ | 51 | """ |
43 | Helper for determining if the given path is a URL | 52 | Helper for determining if the given path is a URL |
@@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid): | |||
108 | newresults[r][i] = results[r][i] | 117 | newresults[r][i] = results[r][i] |
109 | return newresults | 118 | return newresults |
110 | 119 | ||
111 | def strip_ptestresults(results): | 120 | def strip_logs(results): |
112 | newresults = copy.deepcopy(results) | 121 | newresults = copy.deepcopy(results) |
113 | #for a in newresults2: | ||
114 | # newresults = newresults2[a] | ||
115 | for res in newresults: | 122 | for res in newresults: |
116 | if 'result' not in newresults[res]: | 123 | if 'result' not in newresults[res]: |
117 | continue | 124 | continue |
118 | if 'ptestresult.rawlogs' in newresults[res]['result']: | 125 | for logtype in rawlog_sections: |
119 | del newresults[res]['result']['ptestresult.rawlogs'] | 126 | if logtype in newresults[res]['result']: |
127 | del newresults[res]['result'][logtype] | ||
120 | if 'ptestresult.sections' in newresults[res]['result']: | 128 | if 'ptestresult.sections' in newresults[res]['result']: |
121 | for i in newresults[res]['result']['ptestresult.sections']: | 129 | for i in newresults[res]['result']['ptestresult.sections']: |
122 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: | 130 | if 'log' in newresults[res]['result']['ptestresult.sections'][i]: |
123 | del newresults[res]['result']['ptestresult.sections'][i]['log'] | 131 | del newresults[res]['result']['ptestresult.sections'][i]['log'] |
124 | return newresults | 132 | return newresults |
125 | 133 | ||
134 | # For timing numbers, crazy amounts of precision don't make sense and just confuse | ||
135 | # the logs. For numbers over 1, trim to 3 decimal places, for numbers less than 1, | ||
136 | # trim to 4 significant digits | ||
137 | def trim_durations(results): | ||
138 | for res in results: | ||
139 | if 'result' not in results[res]: | ||
140 | continue | ||
141 | for entry in results[res]['result']: | ||
142 | if 'duration' in results[res]['result'][entry]: | ||
143 | duration = results[res]['result'][entry]['duration'] | ||
144 | if duration > 1: | ||
145 | results[res]['result'][entry]['duration'] = float("%.3f" % duration) | ||
146 | elif duration < 1: | ||
147 | results[res]['result'][entry]['duration'] = float("%.4g" % duration) | ||
148 | return results | ||
149 | |||
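A tiny worked example (not part of the patch) of the two format specifiers used in trim_durations() above, applied to made-up durations:

    print(float("%.3f" % 12.3456789))    # 12.346    (> 1: three decimal places)
    print(float("%.4g" % 0.000123456))   # 0.0001235 (< 1: four significant digits)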
150 | def handle_cleanups(results): | ||
151 | # Remove pointless path duplication from old format reproducibility results | ||
152 | for res2 in results: | ||
153 | try: | ||
154 | section = results[res2]['result']['reproducible']['files'] | ||
155 | for pkgtype in section: | ||
156 | for filelist in section[pkgtype].copy(): | ||
157 | if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict: | ||
158 | newlist = [] | ||
159 | for entry in section[pkgtype][filelist]: | ||
160 | newlist.append(entry["reference"].split("/./")[1]) | ||
161 | section[pkgtype][filelist] = newlist | ||
162 | |||
163 | except KeyError: | ||
164 | pass | ||
165 | # Remove pointless duplicate rawlogs data | ||
166 | try: | ||
167 | del results[res2]['result']['reproducible.rawlogs'] | ||
168 | except KeyError: | ||
169 | pass | ||
170 | |||
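A one-line sketch (not part of the patch) of the '/./' split used by handle_cleanups() above to drop the duplicated build-path prefix from old reproducibility entries; the path is a made-up example:

    entry = {"reference": "./tmp/deploy/rpm/./core2_64/busybox-1.36.1-r0.core2_64.rpm"}
    print(entry["reference"].split("/./")[1])   # core2_64/busybox-1.36.1-r0.core2_64.rpm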
126 | def decode_log(logdata): | 171 | def decode_log(logdata): |
127 | if isinstance(logdata, str): | 172 | if isinstance(logdata, str): |
128 | return logdata | 173 | return logdata |
@@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results): | |||
155 | return None | 200 | return None |
156 | return decode_log(results[sectname]['log']) | 201 | return decode_log(results[sectname]['log']) |
157 | 202 | ||
158 | def ptestresult_get_rawlogs(results): | ||
159 | return generic_get_rawlogs('ptestresult.rawlogs', results) | ||
160 | |||
161 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): | 203 | def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): |
162 | for res in results: | 204 | for res in results: |
163 | if res: | 205 | if res: |
@@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p | |||
167 | os.makedirs(os.path.dirname(dst), exist_ok=True) | 209 | os.makedirs(os.path.dirname(dst), exist_ok=True) |
168 | resultsout = results[res] | 210 | resultsout = results[res] |
169 | if not ptestjson: | 211 | if not ptestjson: |
170 | resultsout = strip_ptestresults(results[res]) | 212 | resultsout = strip_logs(results[res]) |
213 | trim_durations(resultsout) | ||
214 | handle_cleanups(resultsout) | ||
171 | with open(dst, 'w') as f: | 215 | with open(dst, 'w') as f: |
172 | f.write(json.dumps(resultsout, sort_keys=True, indent=4)) | 216 | f.write(json.dumps(resultsout, sort_keys=True, indent=1)) |
173 | for res2 in results[res]: | 217 | for res2 in results[res]: |
174 | if ptestlogs and 'result' in results[res][res2]: | 218 | if ptestlogs and 'result' in results[res][res2]: |
175 | seriesresults = results[res][res2]['result'] | 219 | seriesresults = results[res][res2]['result'] |
176 | rawlogs = ptestresult_get_rawlogs(seriesresults) | 220 | for logtype in rawlog_sections: |
177 | if rawlogs is not None: | 221 | logdata = generic_get_rawlogs(logtype, seriesresults) |
178 | with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: | 222 | if logdata is not None: |
179 | f.write(rawlogs) | 223 | logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log") |
224 | with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f: | ||
225 | f.write(logdata) | ||
180 | if 'ptestresult.sections' in seriesresults: | 226 | if 'ptestresult.sections' in seriesresults: |
181 | for i in seriesresults['ptestresult.sections']: | 227 | for i in seriesresults['ptestresult.sections']: |
182 | sectionlog = ptestresult_get_log(seriesresults, i) | 228 | sectionlog = ptestresult_get_log(seriesresults, i) |
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py index e0951f0a8f..b143334e69 100644 --- a/scripts/lib/resulttool/store.py +++ b/scripts/lib/resulttool/store.py | |||
@@ -65,18 +65,35 @@ def store(args, logger): | |||
65 | 65 | ||
66 | for r in revisions: | 66 | for r in revisions: |
67 | results = revisions[r] | 67 | results = revisions[r] |
68 | if args.revision and r[0] != args.revision: | ||
69 | logger.info('skipping %s as non-matching' % r[0]) | ||
70 | continue | ||
68 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} | 71 | keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} |
69 | subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"]) | 72 | subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"]) |
70 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) | 73 | resultutils.save_resultsdata(results, tempdir, ptestlogs=True) |
71 | 74 | ||
72 | logger.info('Storing test result into git repository %s' % args.git_dir) | 75 | logger.info('Storing test result into git repository %s' % args.git_dir) |
73 | 76 | ||
74 | gitarchive.gitarchive(tempdir, args.git_dir, False, False, | 77 | excludes = [] |
78 | if args.logfile_archive: | ||
79 | excludes = ['*.log', "*.log.zst"] | ||
80 | |||
81 | tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False, | ||
75 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", | 82 | "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", |
76 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", | 83 | False, "{branch}/{commit_count}-g{commit}/{tag_number}", |
77 | 'Test run #{tag_number} of {branch}:{commit}', '', | 84 | 'Test run #{tag_number} of {branch}:{commit}', '', |
78 | [], [], False, keywords, logger) | 85 | excludes, [], False, keywords, logger) |
79 | 86 | ||
87 | if args.logfile_archive: | ||
88 | logdir = args.logfile_archive + "/" + tagname | ||
89 | shutil.copytree(tempdir, logdir) | ||
90 | os.chmod(logdir, 0o755) | ||
91 | for root, dirs, files in os.walk(logdir): | ||
92 | for name in files: | ||
93 | if not name.endswith(".log"): | ||
94 | continue | ||
95 | f = os.path.join(root, name) | ||
96 | subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True) | ||
80 | finally: | 97 | finally: |
81 | subprocess.check_call(["rm", "-rf", tempdir]) | 98 | subprocess.check_call(["rm", "-rf", tempdir]) |
82 | 99 | ||
@@ -102,3 +119,7 @@ def register_commands(subparsers): | |||
102 | help='add executed-by configuration to each result file') | 119 | help='add executed-by configuration to each result file') |
103 | parser_build.add_argument('-t', '--extra-test-env', default='', | 120 | parser_build.add_argument('-t', '--extra-test-env', default='', |
104 | help='add extra test environment data to each result file configuration') | 121 | help='add extra test environment data to each result file configuration') |
122 | parser_build.add_argument('-r', '--revision', default='', | ||
123 | help='only store data for the specified revision') | ||
124 | parser_build.add_argument('-l', '--logfile-archive', default='', | ||
125 | help='directory to separately archive log files along with a copy of the results') | ||
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index f23e53cba9..32e749dbb1 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
@@ -179,8 +179,13 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
179 | f.write('SRCREV = "%s"\n' % srcrev) | 179 | f.write('SRCREV = "%s"\n' % srcrev) |
180 | f.write('PV = "0.0+"\n') | 180 | f.write('PV = "0.0+"\n') |
181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) | 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) |
182 | f.write('UNPACKDIR = "%s"\n' % destdir) | ||
183 | |||
182 | # Set S out of the way so it doesn't get created under the workdir | 184 | # Set S out of the way so it doesn't get created under the workdir |
183 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) | 185 | s_dir = os.path.join(tmpdir, 'emptysrc') |
186 | bb.utils.mkdirhier(s_dir) | ||
187 | f.write('S = "%s"\n' % s_dir) | ||
188 | |||
184 | if not mirrors: | 189 | if not mirrors: |
185 | # We do not need PREMIRRORS since we are almost certainly | 190 | # We do not need PREMIRRORS since we are almost certainly |
186 | # fetching new source rather than something that has already | 191 | # fetching new source rather than something that has already |
@@ -232,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
232 | if e.errno != errno.ENOTEMPTY: | 237 | if e.errno != errno.ENOTEMPTY: |
233 | raise | 238 | raise |
234 | 239 | ||
235 | bb.utils.mkdirhier(destdir) | ||
236 | for fn in os.listdir(tmpworkdir): | ||
237 | shutil.move(os.path.join(tmpworkdir, fn), destdir) | ||
238 | |||
239 | finally: | 240 | finally: |
240 | if not preserve_tmp: | 241 | if not preserve_tmp: |
241 | shutil.rmtree(tmpdir) | 242 | shutil.rmtree(tmpdir) |
@@ -271,12 +272,3 @@ def is_src_url(param): | |||
271 | return True | 272 | return True |
272 | return False | 273 | return False |
273 | 274 | ||
274 | def filter_src_subdirs(pth): | ||
275 | """ | ||
276 | Filter out subdirectories of initial unpacked source trees that we do not care about. | ||
277 | Used by devtool and recipetool. | ||
278 | """ | ||
279 | dirlist = os.listdir(pth) | ||
280 | filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa'] | ||
281 | dirlist = [x for x in dirlist if x not in filterout] | ||
282 | return dirlist | ||
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc index 89880b417b..4a440ddafe 100644 --- a/scripts/lib/wic/canned-wks/common.wks.inc +++ b/scripts/lib/wic/canned-wks/common.wks.inc | |||
@@ -1,3 +1,3 @@ | |||
1 | # This file is included into 3 canned wks files from this directory | 1 | # This file is included into 3 canned wks files from this directory |
2 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 2 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 | 3 | part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 |
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks index 8d7d8de6ea..cb640056f1 100644 --- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks +++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks | |||
@@ -3,7 +3,7 @@ | |||
3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
4 | 4 | ||
5 | 5 | ||
6 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 6 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" | 9 | bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" |
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks index f61d941d6d..4fd1999ffb 100644 --- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks +++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | |||
@@ -15,7 +15,7 @@ | |||
15 | # | 15 | # |
16 | # - or any combinations of -r and --rootfs command line options | 16 | # - or any combinations of -r and --rootfs command line options |
17 | 17 | ||
18 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 18 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 | 19 | part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 |
20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 | 20 | part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 |
21 | 21 | ||
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in index 2fd286ff98..5211972955 100644 --- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in +++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | |||
@@ -1,3 +1,3 @@ | |||
1 | bootloader --ptable gpt | 1 | bootloader --ptable gpt |
2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1 | 2 | part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2 |
3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | 3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ |
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in new file mode 100644 index 0000000000..cac0fa32cd --- /dev/null +++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | |||
@@ -0,0 +1,3 @@ | |||
1 | bootloader --ptable gpt --timeout=5 | ||
2 | part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1 | ||
3 | part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ | ||
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks index 9f534fe184..16dfe76dfe 100644 --- a/scripts/lib/wic/canned-wks/mkefidisk.wks +++ b/scripts/lib/wic/canned-wks/mkefidisk.wks | |||
@@ -2,10 +2,10 @@ | |||
2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
3 | # can directly dd to boot media. | 3 | # can directly dd to boot media. |
4 | 4 | ||
5 | part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 | 5 | part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 |
6 | 6 | ||
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap | 9 | part swap --ondisk sda --size 44 --label swap1 --fstype=swap |
10 | 10 | ||
11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" | 11 | bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0" |
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks index 48c5ac4791..c3a030e5b4 100644 --- a/scripts/lib/wic/canned-wks/mkhybridiso.wks +++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks | |||
@@ -2,6 +2,6 @@ | |||
2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image | 2 | # long-description: Creates an EFI and legacy bootable hybrid ISO image |
3 | # which can be used on optical media as well as USB media. | 3 | # which can be used on optical media as well as USB media. |
4 | 4 | ||
5 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO | 5 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO |
6 | 6 | ||
7 | bootloader --timeout=15 --append="" | 7 | bootloader --timeout=15 --append="" |
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks index 63bc4dab6a..f9f8044f7d 100644 --- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks +++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks | |||
@@ -2,5 +2,5 @@ | |||
2 | # long-description: Creates a partitioned SD card image. Boot files | 2 | # long-description: Creates a partitioned SD card image. Boot files |
3 | # are located in the first vfat partition. | 3 | # are located in the first vfat partition. |
4 | 4 | ||
5 | part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 | 5 | part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 |
6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 | 6 | part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 |
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks index 95d7b97a60..3fb2c0e35f 100644 --- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks +++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks | |||
@@ -2,7 +2,7 @@ | |||
2 | # long-description: Creates a partitioned EFI disk image that the user | 2 | # long-description: Creates a partitioned EFI disk image that the user |
3 | # can directly dd to boot media. The selected bootloader is systemd-boot. | 3 | # can directly dd to boot media. The selected bootloader is systemd-boot. |
4 | 4 | ||
5 | part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid | 5 | part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid |
6 | 6 | ||
7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid | 7 | part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid |
8 | 8 | ||
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py index 674ccfc244..b9e60cbe4e 100644 --- a/scripts/lib/wic/engine.py +++ b/scripts/lib/wic/engine.py | |||
@@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, | |||
180 | os.makedirs(options.outdir) | 180 | os.makedirs(options.outdir) |
181 | 181 | ||
182 | pname = options.imager | 182 | pname = options.imager |
183 | # Don't support '-' in plugin names | ||
184 | pname = pname.replace("-", "_") | ||
183 | plugin_class = PluginMgr.get_plugins('imager').get(pname) | 185 | plugin_class = PluginMgr.get_plugins('imager').get(pname) |
184 | if not plugin_class: | 186 | if not plugin_class: |
185 | raise WicError('Unknown plugin: %s' % pname) | 187 | raise WicError('Unknown plugin: %s' % pname) |
@@ -232,6 +234,16 @@ class Disk: | |||
232 | self._psector_size = None | 234 | self._psector_size = None |
233 | self._ptable_format = None | 235 | self._ptable_format = None |
234 | 236 | ||
237 | # define sector size | ||
238 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') | ||
239 | if sector_size_str is not None: | ||
240 | try: | ||
241 | self.sector_size = int(sector_size_str) | ||
242 | except ValueError: | ||
243 | self.sector_size = None | ||
244 | else: | ||
245 | self.sector_size = None | ||
246 | |||
235 | # find parted | 247 | # find parted |
236 | # read paths from $PATH environment variable | 248 | # read paths from $PATH environment variable |
237 | # if it fails, use hardcoded paths | 249 | # if it fails, use hardcoded paths |
@@ -258,7 +270,13 @@ class Disk: | |||
258 | def get_partitions(self): | 270 | def get_partitions(self): |
259 | if self._partitions is None: | 271 | if self._partitions is None: |
260 | self._partitions = OrderedDict() | 272 | self._partitions = OrderedDict() |
261 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | 273 | |
274 | if self.sector_size is not None: | ||
275 | out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \ | ||
276 | (self.sector_size, self.parted, self.imagepath), True) | ||
277 | else: | ||
278 | out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) | ||
279 | |||
262 | parttype = namedtuple("Part", "pnum start end size fstype") | 280 | parttype = namedtuple("Part", "pnum start end size fstype") |
263 | splitted = out.splitlines() | 281 | splitted = out.splitlines() |
264 | # skip over possible errors in exec_cmd output | 282 | # skip over possible errors in exec_cmd output |
@@ -359,7 +377,7 @@ class Disk: | |||
359 | Remove files/dirs and their contents from the partition. | 377 | Remove files/dirs and their contents from the partition. |
360 | This only applies to ext* partitions. | 378 | This only applies to ext* partitions. |
361 | """ | 379 | """ |
362 | abs_path = re.sub('\/\/+', '/', path) | 380 | abs_path = re.sub(r'\/\/+', '/', path) |
363 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, | 381 | cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, |
364 | self._get_part_image(pnum), | 382 | self._get_part_image(pnum), |
365 | abs_path) | 383 | abs_path) |
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py index 163535e431..2e3061f343 100644 --- a/scripts/lib/wic/help.py +++ b/scripts/lib/wic/help.py | |||
@@ -544,18 +544,18 @@ DESCRIPTION | |||
544 | the --source param given to that partition. For example, if the | 544 | the --source param given to that partition. For example, if the |
545 | partition is set up like this: | 545 | partition is set up like this: |
546 | 546 | ||
547 | part /boot --source bootimg-pcbios ... | 547 | part /boot --source bootimg_pcbios ... |
548 | 548 | ||
549 | then the methods defined as class members of the plugin having the | 549 | then the methods defined as class members of the plugin having the |
550 | matching bootimg-pcbios .name class member would be used. | 550 | matching bootimg_pcbios .name class member would be used. |
551 | 551 | ||
552 | To be more concrete, here's the plugin definition that would match | 552 | To be more concrete, here's the plugin definition that would match |
553 | a '--source bootimg-pcbios' usage, along with an example method | 553 | a '--source bootimg_pcbios' usage, along with an example method |
554 | that would be called by the wic implementation when it needed to | 554 | that would be called by the wic implementation when it needed to |
555 | invoke an implementation-specific partition-preparation function: | 555 | invoke an implementation-specific partition-preparation function: |
556 | 556 | ||
557 | class BootimgPcbiosPlugin(SourcePlugin): | 557 | class BootimgPcbiosPlugin(SourcePlugin): |
558 | name = 'bootimg-pcbios' | 558 | name = 'bootimg_pcbios' |
559 | 559 | ||
560 | @classmethod | 560 | @classmethod |
561 | def do_prepare_partition(self, part, ...) | 561 | def do_prepare_partition(self, part, ...) |
@@ -794,7 +794,7 @@ DESCRIPTION | |||
794 | 794 | ||
795 | Here is the content of test.wks: | 795 | Here is the content of test.wks: |
796 | 796 | ||
797 | part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 | 797 | part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 |
798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 | 798 | part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 |
799 | 799 | ||
800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" | 800 | bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" |
@@ -916,6 +916,10 @@ DESCRIPTION | |||
916 | will create an empty partition. The --size parameter has | 916 | will create an empty partition. The --size parameter has |
917 | to be used to specify the size of the empty partition. | 917 | to be used to specify the size of the empty partition. |
918 | 918 | ||
919 | --sourceparams: This option is specific to wic. Supply additional | ||
920 | parameters to the source plugin in | ||
921 | key1=value1,key2 format. | ||
922 | |||
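To illustrate the key1=value1,key2 format described above, a partition line can pass several parameters to its source plugin in one --sourceparams string; which keys are understood depends on the plugin, and the values here are only an example:

    part /boot --source bootimg_efi --sourceparams="loader=systemd-boot,title=Example" --label boot --active --align 1024 --use-uuid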
919 | --ondisk or --ondrive: Forces the partition to be created on | 923 | --ondisk or --ondrive: Forces the partition to be created on |
920 | a particular disk. | 924 | a particular disk. |
921 | 925 | ||
@@ -932,6 +936,7 @@ DESCRIPTION | |||
932 | squashfs | 936 | squashfs |
933 | erofs | 937 | erofs |
934 | swap | 938 | swap |
939 | none | ||
935 | 940 | ||
936 | --fsoptions: Specifies a free-form string of options to be | 941 | --fsoptions: Specifies a free-form string of options to be |
937 | used when mounting the filesystem. This string | 942 | used when mounting the filesystem. This string |
@@ -965,6 +970,14 @@ DESCRIPTION | |||
965 | to start a partition on an x KBytes | 970 | to start a partition on an x KBytes |
966 | boundary. | 971 | boundary. |
967 | 972 | ||
973 | --offset: This option is specific to wic. It places the partition | ||
974 | at exactly the specified offset. If the partition cannot be | ||
975 | placed at the specified offset, the image build will fail. | ||
976 | Specify as an integer value optionally followed by one of the | ||
977 | units s/S for 512 byte sector, k/K for kibibyte, M for | ||
978 | mebibyte and G for gibibyte. The default unit if none is | ||
979 | given is k. | ||
980 | |||
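As a worked example of the units above: a bare integer is taken as kibibytes, so the two illustrative lines below request the same placement, 8 MiB into the image:

    part / --source rootfs --ondisk sda --fstype=ext4 --offset 8192
    part / --source rootfs --ondisk sda --fstype=ext4 --offset 8M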
968 | --no-table: This option is specific to wic. Space will be | 981 | --no-table: This option is specific to wic. Space will be |
969 | reserved for the partition and it will be | 982 | reserved for the partition and it will be |
970 | populated but it will not be added to the | 983 | populated but it will not be added to the |
@@ -1045,6 +1058,18 @@ DESCRIPTION | |||
1045 | not take effect when --mkfs-extraopts is used. This should be taken into | 1058 | not take effect when --mkfs-extraopts is used. This should be taken into |
1046 | account when using --mkfs-extraopts. | 1059 | account when using --mkfs-extraopts. |
1047 | 1060 | ||
1061 | --type: This option is specific to wic. Valid values are 'primary', | ||
1062 | 'logical'. For msdos partition tables, this option specifies | ||
1063 | the partition type. | ||
1064 | |||
1065 | --hidden: This option is specific to wic. This option sets the | ||
1066 | RequiredPartition bit (bit 0) on GPT partitions. | ||
1067 | |||
1068 | --mbr: This option is specific to wic. This option is used with the | ||
1069 | gpt-hybrid partition type that uses both a GPT partition and | ||
1070 | an MBR header. Partitions with this flag will be included in | ||
1071 | this MBR header. | ||
1072 | |||
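Because each of these flags targets a particular partition table format, the illustrative lines below (disk names and sizes are assumptions) show them in isolation rather than in one .wks file: --type with an msdos table, --hidden with gpt, and --mbr with gpt-hybrid:

    part swap --ondisk sda --size 512 --fstype=swap --type logical
    part / --source rootfs --ondisk sda --fstype=ext4 --use-uuid --hidden
    part /boot --source bootimg_pcbios --ondisk sda --active --align 1024 --mbr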
1048 | * bootloader | 1073 | * bootloader |
1049 | 1074 | ||
1050 | This command allows the user to specify various bootloader | 1075 | This command allows the user to specify various bootloader |
@@ -1063,6 +1088,13 @@ DESCRIPTION | |||
1063 | file. Using this option will override any other | 1088 | file. Using this option will override any other |
1064 | bootloader option. | 1089 | bootloader option. |
1065 | 1090 | ||
1091 | --ptable: Specifies the partition table format. Valid values are | ||
1092 | 'msdos', 'gpt', 'gpt-hybrid'. | ||
1093 | |||
1094 | --source: Specifies the source plugin. If not specified, the | ||
1095 | --source value will be copied from the partition that has | ||
1096 | /boot as mountpoint. | ||
1097 | |||
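A hypothetical fragment tying these bootloader options together: it asks for a hybrid GPT/MBR table and omits --source on the bootloader line, so the plugin is copied from the /boot partition as described above (all values are illustrative):

    part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024 --mbr
    part / --source rootfs --ondisk sda --fstype=ext4 --align 1024
    bootloader --ptable gpt-hybrid --timeout=0 --append="rootwait console=tty0"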
1066 | Note that bootloader functionality and boot partitions are | 1098 | Note that bootloader functionality and boot partitions are |
1067 | implemented by the various --source plugins that implement | 1099 | implemented by the various --source plugins that implement |
1068 | bootloader functionality; the bootloader command essentially | 1100 | bootloader functionality; the bootloader command essentially |
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py index 795707ec5d..b34691d313 100644 --- a/scripts/lib/wic/partition.py +++ b/scripts/lib/wic/partition.py | |||
@@ -164,6 +164,9 @@ class Partition(): | |||
164 | 164 | ||
165 | plugins = PluginMgr.get_plugins('source') | 165 | plugins = PluginMgr.get_plugins('source') |
166 | 166 | ||
167 | # Don't support '-' in plugin names | ||
168 | self.source = self.source.replace("-", "_") | ||
169 | |||
167 | if self.source not in plugins: | 170 | if self.source not in plugins: |
168 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" | 171 | raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" |
169 | "See 'wic list source-plugins' for a list of available" | 172 | "See 'wic list source-plugins' for a list of available" |
@@ -178,7 +181,7 @@ class Partition(): | |||
178 | splitted = self.sourceparams.split(',') | 181 | splitted = self.sourceparams.split(',') |
179 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) | 182 | srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) |
180 | 183 | ||
181 | plugin = PluginMgr.get_plugins('source')[self.source] | 184 | plugin = plugins[self.source] |
182 | plugin.do_configure_partition(self, srcparams_dict, creator, | 185 | plugin.do_configure_partition(self, srcparams_dict, creator, |
183 | cr_workdir, oe_builddir, bootimg_dir, | 186 | cr_workdir, oe_builddir, bootimg_dir, |
184 | kernel_dir, native_sysroot) | 187 | kernel_dir, native_sysroot) |
@@ -222,19 +225,19 @@ class Partition(): | |||
222 | if (pseudo_dir): | 225 | if (pseudo_dir): |
223 | # Canonicalize the ignore paths. This corresponds to | 226 | # Canonicalize the ignore paths. This corresponds to |
224 | # calling oe.path.canonicalize(), which is used in bitbake.conf. | 227 | # calling oe.path.canonicalize(), which is used in bitbake.conf. |
225 | ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",") | 228 | include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",") |
226 | canonical_paths = [] | 229 | canonical_paths = [] |
227 | for path in ignore_paths: | 230 | for path in include_paths: |
228 | if "$" not in path: | 231 | if "$" not in path: |
229 | trailing_slash = path.endswith("/") and "/" or "" | 232 | trailing_slash = path.endswith("/") and "/" or "" |
230 | canonical_paths.append(os.path.realpath(path) + trailing_slash) | 233 | canonical_paths.append(os.path.realpath(path) + trailing_slash) |
231 | ignore_paths = ",".join(canonical_paths) | 234 | include_paths = ",".join(canonical_paths) |
232 | 235 | ||
233 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix | 236 | pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix |
234 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir | 237 | pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir |
235 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir | 238 | pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir |
236 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" | 239 | pseudo += "export PSEUDO_NOSYMLINKEXP=1;" |
237 | pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths | 240 | pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths |
238 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") | 241 | pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") |
239 | else: | 242 | else: |
240 | pseudo = None | 243 | pseudo = None |
@@ -244,7 +247,7 @@ class Partition(): | |||
244 | # from bitbake variable | 247 | # from bitbake variable |
245 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') | 248 | rsize_bb = get_bitbake_var('ROOTFS_SIZE') |
246 | rdir = get_bitbake_var('IMAGE_ROOTFS') | 249 | rdir = get_bitbake_var('IMAGE_ROOTFS') |
247 | if rsize_bb and rdir == rootfs_dir: | 250 | if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")): |
248 | # Bitbake variable ROOTFS_SIZE is calculated in | 251 | # Bitbake variable ROOTFS_SIZE is calculated in |
249 | # Image._get_rootfs_size method from meta/lib/oe/image.py | 252 | # Image._get_rootfs_size method from meta/lib/oe/image.py |
250 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, | 253 | # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, |
@@ -284,19 +287,8 @@ class Partition(): | |||
284 | 287 | ||
285 | extraopts = self.mkfs_extraopts or "-F -i 8192" | 288 | extraopts = self.mkfs_extraopts or "-F -i 8192" |
286 | 289 | ||
287 | if os.getenv('SOURCE_DATE_EPOCH'): | 290 | # use hash_seed to generate reproducible ext4 images |
288 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | 291 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo) |
289 | if pseudo: | ||
290 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
291 | else: | ||
292 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
293 | |||
294 | # Set hash_seed to generate deterministic directory indexes | ||
295 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
296 | if self.fsuuid: | ||
297 | namespace = uuid.UUID(self.fsuuid) | ||
298 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
299 | extraopts += " -E hash_seed=%s" % hash_seed | ||
300 | 292 | ||
301 | label_str = "" | 293 | label_str = "" |
302 | if self.label: | 294 | if self.label: |
@@ -344,6 +336,23 @@ class Partition(): | |||
344 | 336 | ||
345 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 337 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
346 | 338 | ||
339 | def get_hash_seed_ext4(self, extraopts, pseudo): | ||
340 | if os.getenv('SOURCE_DATE_EPOCH'): | ||
341 | sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) | ||
342 | if pseudo: | ||
343 | pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo) | ||
344 | else: | ||
345 | pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time | ||
346 | |||
347 | # Set hash_seed to generate deterministic directory indexes | ||
348 | namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460") | ||
349 | if self.fsuuid: | ||
350 | namespace = uuid.UUID(self.fsuuid) | ||
351 | hash_seed = str(uuid.uuid5(namespace, str(sde_time))) | ||
352 | extraopts += " -E hash_seed=%s" % hash_seed | ||
353 | |||
354 | return (extraopts, pseudo) | ||
355 | |||
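The get_hash_seed_ext4() helper factored out here (and reused by the second call site below) is what keeps ext* images reproducible: with SOURCE_DATE_EPOCH set, e2fsprogs is given a fixed fake time and mkfs receives a hash_seed derived deterministically from a namespace UUID (the partition's fsuuid when one is set) and that epoch. A standalone sketch of the derivation, with an example epoch value:

    import uuid

    # Same namespace constant as get_hash_seed_ext4(); the epoch value is an example.
    namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
    sde_time = 1700000000  # would normally come from SOURCE_DATE_EPOCH
    hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
    print(hash_seed)  # identical inputs always yield the identical seed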
347 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, | 356 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, |
348 | native_sysroot, pseudo): | 357 | native_sysroot, pseudo): |
349 | """ | 358 | """ |
@@ -437,13 +446,16 @@ class Partition(): | |||
437 | 446 | ||
438 | extraopts = self.mkfs_extraopts or "-i 8192" | 447 | extraopts = self.mkfs_extraopts or "-i 8192" |
439 | 448 | ||
449 | # use hash_seed to generate reproducible ext4 images | ||
450 | (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None) | ||
451 | |||
440 | label_str = "" | 452 | label_str = "" |
441 | if self.label: | 453 | if self.label: |
442 | label_str = "-L %s" % self.label | 454 | label_str = "-L %s" % self.label |
443 | 455 | ||
444 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ | 456 | mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ |
445 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) | 457 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) |
446 | exec_native_cmd(mkfs_cmd, native_sysroot) | 458 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
447 | 459 | ||
448 | self.check_for_Y2038_problem(rootfs, native_sysroot) | 460 | self.check_for_Y2038_problem(rootfs, native_sysroot) |
449 | 461 | ||
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py index b64568339b..640da292d3 100644 --- a/scripts/lib/wic/pluginbase.py +++ b/scripts/lib/wic/pluginbase.py | |||
@@ -44,7 +44,7 @@ class PluginMgr: | |||
44 | path = os.path.join(layer_path, script_plugin_dir) | 44 | path = os.path.join(layer_path, script_plugin_dir) |
45 | path = os.path.abspath(os.path.expanduser(path)) | 45 | path = os.path.abspath(os.path.expanduser(path)) |
46 | if path not in cls._plugin_dirs and os.path.isdir(path): | 46 | if path not in cls._plugin_dirs and os.path.isdir(path): |
47 | cls._plugin_dirs.insert(0, path) | 47 | cls._plugin_dirs.append(path) |
48 | 48 | ||
49 | if ptype not in PLUGINS: | 49 | if ptype not in PLUGINS: |
50 | # load all ptype plugins | 50 | # load all ptype plugins |
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py index a1d152659b..6e1f1c8cba 100644 --- a/scripts/lib/wic/plugins/imager/direct.py +++ b/scripts/lib/wic/plugins/imager/direct.py | |||
@@ -203,6 +203,8 @@ class DirectPlugin(ImagerPlugin): | |||
203 | source_plugin = self.ks.bootloader.source | 203 | source_plugin = self.ks.bootloader.source |
204 | disk_name = self.parts[0].disk | 204 | disk_name = self.parts[0].disk |
205 | if source_plugin: | 205 | if source_plugin: |
206 | # Don't support '-' in plugin names | ||
207 | source_plugin = source_plugin.replace("-", "_") | ||
206 | plugin = PluginMgr.get_plugins('source')[source_plugin] | 208 | plugin = PluginMgr.get_plugins('source')[source_plugin] |
207 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, | 209 | plugin.do_install_disk(self._image, disk_name, self, self.workdir, |
208 | self.oe_builddir, self.bootimg_dir, | 210 | self.oe_builddir, self.bootimg_dir, |
@@ -321,7 +323,15 @@ class PartitionedImage(): | |||
321 | self.partitions = partitions | 323 | self.partitions = partitions |
322 | self.partimages = [] | 324 | self.partimages = [] |
323 | # Size of a sector used in calculations | 325 | # Size of a sector used in calculations |
324 | self.sector_size = SECTOR_SIZE | 326 | sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE') |
327 | if sector_size_str is not None: | ||
328 | try: | ||
329 | self.sector_size = int(sector_size_str) | ||
330 | except ValueError: | ||
331 | self.sector_size = SECTOR_SIZE | ||
332 | else: | ||
333 | self.sector_size = SECTOR_SIZE | ||
334 | |||
325 | self.native_sysroot = native_sysroot | 335 | self.native_sysroot = native_sysroot |
326 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) | 336 | num_real_partitions = len([p for p in self.partitions if not p.no_table]) |
327 | self.extra_space = extra_space | 337 | self.extra_space = extra_space |
@@ -508,7 +518,8 @@ class PartitionedImage(): | |||
508 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", | 518 | logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", |
509 | parttype, start, end, size) | 519 | parttype, start, end, size) |
510 | 520 | ||
511 | cmd = "parted -s %s unit s mkpart %s" % (device, parttype) | 521 | cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \ |
522 | (self.sector_size, device, parttype) | ||
512 | if fstype: | 523 | if fstype: |
513 | cmd += " %s" % fstype | 524 | cmd += " %s" % fstype |
514 | cmd += " %d %d" % (start, end) | 525 | cmd += " %d %d" % (start, end) |
@@ -527,8 +538,8 @@ class PartitionedImage(): | |||
527 | os.ftruncate(sparse.fileno(), min_size) | 538 | os.ftruncate(sparse.fileno(), min_size) |
528 | 539 | ||
529 | logger.debug("Initializing partition table for %s", device) | 540 | logger.debug("Initializing partition table for %s", device) |
530 | exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format), | 541 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" % |
531 | self.native_sysroot) | 542 | (self.sector_size, device, ptable_format), self.native_sysroot) |
532 | 543 | ||
533 | def _write_disk_guid(self): | 544 | def _write_disk_guid(self): |
534 | if self.ptable_format in ('gpt', 'gpt-hybrid'): | 545 | if self.ptable_format in ('gpt', 'gpt-hybrid'): |
@@ -538,7 +549,8 @@ class PartitionedImage(): | |||
538 | self.disk_guid = uuid.uuid4() | 549 | self.disk_guid = uuid.uuid4() |
539 | 550 | ||
540 | logger.debug("Set disk guid %s", self.disk_guid) | 551 | logger.debug("Set disk guid %s", self.disk_guid) |
541 | sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid) | 552 | sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \ |
553 | (self.sector_size, self.path, self.disk_guid) | ||
542 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) | 554 | exec_native_cmd(sfdisk_cmd, self.native_sysroot) |
543 | 555 | ||
544 | def create(self): | 556 | def create(self): |
@@ -613,45 +625,44 @@ class PartitionedImage(): | |||
613 | partition_label = part.part_name if part.part_name else part.label | 625 | partition_label = part.part_name if part.part_name else part.label |
614 | logger.debug("partition %d: set name to %s", | 626 | logger.debug("partition %d: set name to %s", |
615 | part.num, partition_label) | 627 | part.num, partition_label) |
616 | exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ | 628 | exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \ |
617 | (part.num, partition_label, | 629 | (self.sector_size, self.path, part.num, |
618 | self.path), self.native_sysroot) | 630 | partition_label), self.native_sysroot) |
619 | |||
620 | if part.part_type: | 631 | if part.part_type: |
621 | logger.debug("partition %d: set type UID to %s", | 632 | logger.debug("partition %d: set type UID to %s", |
622 | part.num, part.part_type) | 633 | part.num, part.part_type) |
623 | exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ | 634 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \ |
624 | (part.num, part.part_type, | 635 | (self.sector_size, self.path, part.num, |
625 | self.path), self.native_sysroot) | 636 | part.part_type), self.native_sysroot) |
626 | 637 | ||
627 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): | 638 | if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): |
628 | logger.debug("partition %d: set UUID to %s", | 639 | logger.debug("partition %d: set UUID to %s", |
629 | part.num, part.uuid) | 640 | part.num, part.uuid) |
630 | exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ | 641 | exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \ |
631 | (part.num, part.uuid, self.path), | 642 | (self.sector_size, self.path, part.num, part.uuid), |
632 | self.native_sysroot) | 643 | self.native_sysroot) |
633 | 644 | ||
634 | if part.active: | 645 | if part.active: |
635 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" | 646 | flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" |
636 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", | 647 | logger.debug("Set '%s' flag for partition '%s' on disk '%s'", |
637 | flag_name, part.num, self.path) | 648 | flag_name, part.num, self.path) |
638 | exec_native_cmd("parted -s %s set %d %s on" % \ | 649 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
639 | (self.path, part.num, flag_name), | 650 | (self.sector_size, self.path, part.num, flag_name), |
640 | self.native_sysroot) | 651 | self.native_sysroot) |
641 | if self.ptable_format == 'gpt-hybrid' and part.mbr: | 652 | if self.ptable_format == 'gpt-hybrid' and part.mbr: |
642 | exec_native_cmd("parted -s %s set %d %s on" % \ | 653 | exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \ |
643 | (mbr_path, hybrid_mbr_part_num, "boot"), | 654 | (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"), |
644 | self.native_sysroot) | 655 | self.native_sysroot) |
645 | if part.system_id: | 656 | if part.system_id: |
646 | exec_native_cmd("sfdisk --part-type %s %s %s" % \ | 657 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \ |
647 | (self.path, part.num, part.system_id), | 658 | (self.sector_size, self.path, part.num, part.system_id), |
648 | self.native_sysroot) | 659 | self.native_sysroot) |
649 | 660 | ||
650 | if part.hidden and self.ptable_format == "gpt": | 661 | if part.hidden and self.ptable_format == "gpt": |
651 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", | 662 | logger.debug("Set hidden attribute for partition '%s' on disk '%s'", |
652 | part.num, self.path) | 663 | part.num, self.path) |
653 | exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \ | 664 | exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \ |
654 | (self.path, part.num), | 665 | (self.sector_size, self.path, part.num), |
655 | self.native_sysroot) | 666 | self.native_sysroot) |
656 | 667 | ||
657 | if self.ptable_format == "gpt-hybrid": | 668 | if self.ptable_format == "gpt-hybrid": |
@@ -664,7 +675,8 @@ class PartitionedImage(): | |||
664 | # create with an arbitrary type, then change it to the correct type | 675 | # create with an arbitrary type, then change it to the correct type |
665 | # with sfdisk | 676 | # with sfdisk |
666 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) | 677 | self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) |
667 | exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num), | 678 | exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \ |
679 | (self.sector_size, mbr_path, hybrid_mbr_part_num), | ||
668 | self.native_sysroot) | 680 | self.native_sysroot) |
669 | 681 | ||
670 | # Copy hybrid MBR | 682 | # Copy hybrid MBR |
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py index 5bd7390680..4279ddded8 100644 --- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py +++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py | |||
@@ -13,7 +13,7 @@ | |||
13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | 13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
14 | # | 14 | # |
15 | # DESCRIPTION | 15 | # DESCRIPTION |
16 | # This implements the 'bootimg-biosplusefi' source plugin class for 'wic' | 16 | # This implements the 'bootimg_biosplusefi' source plugin class for 'wic' |
17 | # | 17 | # |
18 | # AUTHORS | 18 | # AUTHORS |
19 | # William Bourque <wbourque [at) gmail.com> | 19 | # William Bourque <wbourque [at) gmail.com> |
@@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
34 | 34 | ||
35 | Note it is possible to create an image that can boot from both | 35 | Note it is possible to create an image that can boot from both |
36 | legacy BIOS and EFI by defining two partitions: one with arg | 36 | legacy BIOS and EFI by defining two partitions: one with arg |
37 | --source bootimg-efi and another one with --source bootimg-pcbios. | 37 | --source bootimg_efi and another one with --source bootimg_pcbios. |
38 | However, this method has the obvious downside that it requires TWO | 38 | However, this method has the obvious downside that it requires TWO |
39 | partitions to be created on the storage device. | 39 | partitions to be created on the storage device. |
40 | Both partitions will also be marked as "bootable" which does not work on | 40 | Both partitions will also be marked as "bootable" which does not work on |
@@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
45 | the first partition will be duplicated into the second, even though it | 45 | the first partition will be duplicated into the second, even though it |
46 | will not be used at all. | 46 | will not be used at all. |
47 | 47 | ||
48 | Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin | 48 | Also, unlike "isoimage_isohybrid" that also does BIOS and EFI, this plugin |
49 | allows you to have more than a single rootfs partition and does | 49 | allows you to have more than a single rootfs partition and does |
50 | not turn the rootfs into an initramfs RAM image. | 50 | not turn the rootfs into an initramfs RAM image. |
51 | 51 | ||
@@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
53 | does not have the limitations listed above. | 53 | does not have the limitations listed above. |
54 | 54 | ||
55 | The plugin is made so it tries not to reimplement what's already | 55 | The plugin is made so it tries not to reimplement what's already |
56 | been done in other plugins; as such it imports "bootimg-pcbios" | 56 | been done in other plugins; as such it imports "bootimg_pcbios" |
57 | and "bootimg-efi". | 57 | and "bootimg_efi". |
58 | Plugin "bootimg-pcbios" is used to generate legacy BIOS boot. | 58 | Plugin "bootimg_pcbios" is used to generate legacy BIOS boot. |
59 | Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it | 59 | Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it |
60 | requires a --sourceparams argument to know which loader to use; refer | 60 | requires a --sourceparams argument to know which loader to use; refer |
61 | to "bootimg-efi" code/documentation for the list of loaders. | 61 | to "bootimg_efi" code/documentation for the list of loaders. |
62 | 62 | ||
63 | Imports are handled with "SourceFileLoader" from importlib as it is | 63 | Imports are handled with "SourceFileLoader" from importlib as it is |
64 | otherwise very difficult to import modules that have a hyphen "-" in their | 64 | otherwise very difficult to import modules that have a hyphen "-" in their |
65 | filename. | 65 | filename. |
66 | The SourcePlugin() methods used in the plugins (do_install_disk, | 66 | The SourcePlugin() methods used in the plugins (do_install_disk, |
67 | do_configure_partition, do_prepare_partition) are then called on both, | 67 | do_configure_partition, do_prepare_partition) are then called on both, |
68 | beginning by "bootimg-efi". | 68 | beginning by "bootimg_efi". |
69 | 69 | ||
70 | Plugin options, such as "--sourceparams" can still be passed to a | 70 | Plugin options, such as "--sourceparams" can still be passed to a |
71 | plugin, as long as they do not cause issues in the other plugin. | 71 | plugin, as long as they do not cause issues in the other plugin. |
72 | 72 | ||
73 | Example wic configuration: | 73 | Example wic configuration: |
74 | part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\ | 74 | part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\ |
75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid | 75 | --ondisk sda --label os_boot --active --align 1024 --use-uuid |
76 | """ | 76 | """ |
77 | 77 | ||
78 | name = 'bootimg-biosplusefi' | 78 | name = 'bootimg_biosplusefi' |
79 | 79 | ||
80 | __PCBIOS_MODULE_NAME = "bootimg-pcbios" | 80 | __PCBIOS_MODULE_NAME = "bootimg_pcbios" |
81 | __EFI_MODULE_NAME = "bootimg-efi" | 81 | __EFI_MODULE_NAME = "bootimg_efi" |
82 | 82 | ||
83 | __imgEFIObj = None | 83 | __imgEFIObj = None |
84 | __imgBiosObj = None | 84 | __imgBiosObj = None |
@@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
100 | 100 | ||
101 | """ | 101 | """ |
102 | 102 | ||
103 | # Import bootimg-pcbios (class name "BootimgPcbiosPlugin") | 103 | # Import bootimg_pcbios (class name "BootimgPcbiosPlugin") |
104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 104 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
105 | cls.__PCBIOS_MODULE_NAME + ".py") | 105 | cls.__PCBIOS_MODULE_NAME + ".py") |
106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) | 106 | loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) |
@@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin): | |||
108 | loader.exec_module(mod) | 108 | loader.exec_module(mod) |
109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() | 109 | cls.__imgBiosObj = mod.BootimgPcbiosPlugin() |
110 | 110 | ||
111 | # Import bootimg-efi (class name "BootimgEFIPlugin") | 111 | # Import bootimg_efi (class name "BootimgEFIPlugin") |
112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), | 112 | modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), |
113 | cls.__EFI_MODULE_NAME + ".py") | 113 | cls.__EFI_MODULE_NAME + ".py") |
114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) | 114 | loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) |
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py index 13a9cddf4e..cf16705a28 100644 --- a/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/scripts/lib/wic/plugins/source/bootimg_efi.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-efi' source plugin class for 'wic' | 7 | # This implements the 'bootimg_efi' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
@@ -32,7 +32,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
32 | This plugin supports GRUB 2 and systemd-boot bootloaders. | 32 | This plugin supports GRUB 2 and systemd-boot bootloaders. |
33 | """ | 33 | """ |
34 | 34 | ||
35 | name = 'bootimg-efi' | 35 | name = 'bootimg_efi' |
36 | 36 | ||
37 | @classmethod | 37 | @classmethod |
38 | def _copy_additional_files(cls, hdddir, initrd, dtb): | 38 | def _copy_additional_files(cls, hdddir, initrd, dtb): |
@@ -43,16 +43,18 @@ class BootimgEFIPlugin(SourcePlugin): | |||
43 | if initrd: | 43 | if initrd: |
44 | initrds = initrd.split(';') | 44 | initrds = initrd.split(';') |
45 | for rd in initrds: | 45 | for rd in initrds: |
46 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir) | 46 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir) |
47 | exec_cmd(cp_cmd, True) | 47 | out = exec_cmd(cp_cmd, True) |
48 | logger.debug("initrd files:\n%s" % (out)) | ||
48 | else: | 49 | else: |
49 | logger.debug("Ignoring missing initrd") | 50 | logger.debug("Ignoring missing initrd") |
50 | 51 | ||
51 | if dtb: | 52 | if dtb: |
52 | if ';' in dtb: | 53 | if ';' in dtb: |
53 | raise WicError("Only one DTB supported, exiting") | 54 | raise WicError("Only one DTB supported, exiting") |
54 | cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir) | 55 | cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir) |
55 | exec_cmd(cp_cmd, True) | 56 | out = exec_cmd(cp_cmd, True) |
57 | logger.debug("dtb files:\n%s" % (out)) | ||
56 | 58 | ||
57 | @classmethod | 59 | @classmethod |
58 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): | 60 | def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): |
@@ -123,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin): | |||
123 | @classmethod | 125 | @classmethod |
124 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): | 126 | def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): |
125 | """ | 127 | """ |
126 | Create loader-specific systemd-boot/gummiboot config | 128 | Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki) |
129 | support is done in the image recipe with uki.bbclass; only the systemd-boot loader config | ||
130 | and the ESP partition structure are created here. | ||
127 | """ | 131 | """ |
132 | # detect uki.bbclass usage | ||
133 | image_classes = get_bitbake_var("IMAGE_CLASSES").split() | ||
134 | unified_image = False | ||
135 | if "uki" in image_classes: | ||
136 | unified_image = True | ||
137 | |||
128 | install_cmd = "install -d %s/loader" % hdddir | 138 | install_cmd = "install -d %s/loader" % hdddir |
129 | exec_cmd(install_cmd) | 139 | exec_cmd(install_cmd) |
130 | 140 | ||
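With create-unified-kernel-image removed later in this patch, the plugin now only detects that UKI generation is handled elsewhere and adjusts the loader configuration accordingly. A minimal, assumed way to enable that path (the exact mechanism for pulling in uki.bbclass may differ per setup):

    # image recipe or local.conf (illustrative)
    IMAGE_CLASSES += "uki"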
@@ -132,28 +142,26 @@ class BootimgEFIPlugin(SourcePlugin): | |||
132 | exec_cmd(install_cmd) | 142 | exec_cmd(install_cmd) |
133 | 143 | ||
134 | bootloader = creator.ks.bootloader | 144 | bootloader = creator.ks.bootloader |
135 | |||
136 | unified_image = source_params.get('create-unified-kernel-image') == "true" | ||
137 | |||
138 | loader_conf = "" | 145 | loader_conf = "" |
139 | if not unified_image: | ||
140 | loader_conf += "default boot\n" | ||
141 | loader_conf += "timeout %d\n" % bootloader.timeout | ||
142 | 146 | ||
143 | initrd = source_params.get('initrd') | 147 | # 5 seconds is a sensible default timeout |
144 | dtb = source_params.get('dtb') | 148 | loader_conf += "timeout %d\n" % (bootloader.timeout or 5) |
145 | |||
146 | if not unified_image: | ||
147 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
148 | 149 | ||
149 | logger.debug("Writing systemd-boot config " | 150 | logger.debug("Writing systemd-boot config " |
150 | "%s/hdd/boot/loader/loader.conf", cr_workdir) | 151 | "%s/hdd/boot/loader/loader.conf", cr_workdir) |
151 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") | 152 | cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") |
152 | cfg.write(loader_conf) | 153 | cfg.write(loader_conf) |
154 | logger.debug("loader.conf:\n%s" % (loader_conf)) | ||
153 | cfg.close() | 155 | cfg.close() |
154 | 156 | ||
157 | initrd = source_params.get('initrd') | ||
158 | dtb = source_params.get('dtb') | ||
159 | if not unified_image: | ||
160 | cls._copy_additional_files(hdddir, initrd, dtb) | ||
161 | |||
155 | configfile = creator.ks.bootloader.configfile | 162 | configfile = creator.ks.bootloader.configfile |
156 | custom_cfg = None | 163 | custom_cfg = None |
164 | boot_conf = "" | ||
157 | if configfile: | 165 | if configfile: |
158 | custom_cfg = get_custom_config(configfile) | 166 | custom_cfg = get_custom_config(configfile) |
159 | if custom_cfg: | 167 | if custom_cfg: |
@@ -164,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
164 | else: | 172 | else: |
165 | raise WicError("configfile is specified but failed to " | 173 | raise WicError("configfile is specified but failed to " |
166 | "get it from %s.", configfile) | 174 | "get it from %s.", configfile) |
167 | 175 | else: | |
168 | if not custom_cfg: | ||
169 | # Create systemd-boot configuration using parameters from wks file | 176 | # Create systemd-boot configuration using parameters from wks file |
170 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 177 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
171 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": | 178 | if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": |
@@ -175,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin): | |||
175 | 182 | ||
176 | title = source_params.get('title') | 183 | title = source_params.get('title') |
177 | 184 | ||
178 | boot_conf = "" | ||
179 | boot_conf += "title %s\n" % (title if title else "boot") | 185 | boot_conf += "title %s\n" % (title if title else "boot") |
180 | boot_conf += "linux /%s\n" % kernel | 186 | boot_conf += "linux /%s\n" % kernel |
181 | 187 | ||
@@ -200,6 +206,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
200 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) | 206 | "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) |
201 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") | 207 | cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") |
202 | cfg.write(boot_conf) | 208 | cfg.write(boot_conf) |
209 | logger.debug("boot.conf:\n%s" % (boot_conf)) | ||
203 | cfg.close() | 210 | cfg.close() |
204 | 211 | ||
205 | 212 | ||
@@ -223,9 +230,9 @@ class BootimgEFIPlugin(SourcePlugin): | |||
223 | elif source_params['loader'] == 'uefi-kernel': | 230 | elif source_params['loader'] == 'uefi-kernel': |
224 | pass | 231 | pass |
225 | else: | 232 | else: |
226 | raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) | 233 | raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader']) |
227 | except KeyError: | 234 | except KeyError: |
228 | raise WicError("bootimg-efi requires a loader, none specified") | 235 | raise WicError("bootimg_efi requires a loader, none specified") |
229 | 236 | ||
230 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: | 237 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: |
231 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') | 238 | logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') |
@@ -245,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin): | |||
245 | 252 | ||
246 | # list of tuples (src_name, dst_name) | 253 | # list of tuples (src_name, dst_name) |
247 | deploy_files = [] | 254 | deploy_files = [] |
248 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | 255 | for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files): |
249 | if ';' in src_entry: | 256 | if ';' in src_entry: |
250 | dst_entry = tuple(src_entry.split(';')) | 257 | dst_entry = tuple(src_entry.split(';')) |
251 | if not dst_entry[0] or not dst_entry[1]: | 258 | if not dst_entry[0] or not dst_entry[1]: |
@@ -304,134 +311,43 @@ class BootimgEFIPlugin(SourcePlugin): | |||
304 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) | 311 | (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) |
305 | 312 | ||
306 | if source_params.get('create-unified-kernel-image') == "true": | 313 | if source_params.get('create-unified-kernel-image') == "true": |
307 | initrd = source_params.get('initrd') | 314 | raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.") |
308 | if not initrd: | ||
309 | raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting") | ||
310 | |||
311 | deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") | ||
312 | efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub")) | ||
313 | if len(efi_stub) == 0: | ||
314 | raise WicError("Unified Kernel Image EFI stub not found, exiting") | ||
315 | efi_stub = efi_stub[0] | ||
316 | |||
317 | with tempfile.TemporaryDirectory() as tmp_dir: | ||
318 | label = source_params.get('label') | ||
319 | label_conf = "root=%s" % creator.rootdev | ||
320 | if label: | ||
321 | label_conf = "LABEL=%s" % label | ||
322 | |||
323 | bootloader = creator.ks.bootloader | ||
324 | cmdline = open("%s/cmdline" % tmp_dir, "w") | ||
325 | cmdline.write("%s %s" % (label_conf, bootloader.append)) | ||
326 | cmdline.close() | ||
327 | 315 | ||
328 | initrds = initrd.split(';') | 316 | if source_params.get('install-kernel-into-boot-dir') != 'false': |
329 | initrd = open("%s/initrd" % tmp_dir, "wb") | 317 | install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \ |
330 | for f in initrds: | 318 | (staging_kernel_dir, kernel, hdddir, kernel) |
331 | with open("%s/%s" % (deploy_dir, f), 'rb') as in_file: | 319 | out = exec_cmd(install_cmd) |
332 | shutil.copyfileobj(in_file, initrd) | 320 | logger.debug("Installed kernel files:\n%s" % out) |
333 | initrd.close() | ||
334 | |||
335 | # Searched by systemd-boot: | ||
336 | # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images | ||
337 | install_cmd = "install -d %s/EFI/Linux" % hdddir | ||
338 | exec_cmd(install_cmd) | ||
339 | |||
340 | staging_dir_host = get_bitbake_var("STAGING_DIR_HOST") | ||
341 | target_sys = get_bitbake_var("TARGET_SYS") | ||
342 | |||
343 | objdump_cmd = "%s-objdump" % target_sys | ||
344 | objdump_cmd += " -p %s" % efi_stub | ||
345 | objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'" | ||
346 | |||
347 | ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot) | ||
348 | align = int(align_str, 16) | ||
349 | |||
350 | objdump_cmd = "%s-objdump" % target_sys | ||
351 | objdump_cmd += " -h %s | tail -2" % efi_stub | ||
352 | ret, output = exec_native_cmd(objdump_cmd, native_sysroot) | ||
353 | |||
354 | offset = int(output.split()[2], 16) + int(output.split()[3], 16) | ||
355 | |||
356 | osrel_off = offset + align - offset % align | ||
357 | osrel_path = "%s/usr/lib/os-release" % staging_dir_host | ||
358 | osrel_sz = os.stat(osrel_path).st_size | ||
359 | |||
360 | cmdline_off = osrel_off + osrel_sz | ||
361 | cmdline_off = cmdline_off + align - cmdline_off % align | ||
362 | cmdline_sz = os.stat(cmdline.name).st_size | ||
363 | |||
364 | dtb_off = cmdline_off + cmdline_sz | ||
365 | dtb_off = dtb_off + align - dtb_off % align | ||
366 | |||
367 | dtb = source_params.get('dtb') | ||
368 | if dtb: | ||
369 | if ';' in dtb: | ||
370 | raise WicError("Only one DTB supported, exiting") | ||
371 | dtb_path = "%s/%s" % (deploy_dir, dtb) | ||
372 | dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \ | ||
373 | (dtb_path, dtb_off) | ||
374 | linux_off = dtb_off + os.stat(dtb_path).st_size | ||
375 | linux_off = linux_off + align - linux_off % align | ||
376 | else: | ||
377 | dtb_params = '' | ||
378 | linux_off = dtb_off | ||
379 | |||
380 | linux_path = "%s/%s" % (staging_kernel_dir, kernel) | ||
381 | linux_sz = os.stat(linux_path).st_size | ||
382 | |||
383 | initrd_off = linux_off + linux_sz | ||
384 | initrd_off = initrd_off + align - initrd_off % align | ||
385 | |||
386 | # https://www.freedesktop.org/software/systemd/man/systemd-stub.html | ||
387 | objcopy_cmd = "%s-objcopy" % target_sys | ||
388 | objcopy_cmd += " --enable-deterministic-archives" | ||
389 | objcopy_cmd += " --preserve-dates" | ||
390 | objcopy_cmd += " --add-section .osrel=%s" % osrel_path | ||
391 | objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off | ||
392 | objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name | ||
393 | objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off | ||
394 | objcopy_cmd += dtb_params | ||
395 | objcopy_cmd += " --add-section .linux=%s" % linux_path | ||
396 | objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off | ||
397 | objcopy_cmd += " --add-section .initrd=%s" % initrd.name | ||
398 | objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off | ||
399 | objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir) | ||
400 | |||
401 | exec_native_cmd(objcopy_cmd, native_sysroot) | ||
402 | else: | ||
403 | if source_params.get('install-kernel-into-boot-dir') != 'false': | ||
404 | install_cmd = "install -m 0644 %s/%s %s/%s" % \ | ||
405 | (staging_kernel_dir, kernel, hdddir, kernel) | ||
406 | exec_cmd(install_cmd) | ||
407 | 321 | ||
408 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): | 322 | if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): |
409 | for src_path, dst_path in cls.install_task: | 323 | for src_path, dst_path in cls.install_task: |
410 | install_cmd = "install -m 0644 -D %s %s" \ | 324 | install_cmd = "install -v -p -m 0644 -D %s %s" \ |
411 | % (os.path.join(kernel_dir, src_path), | 325 | % (os.path.join(kernel_dir, src_path), |
412 | os.path.join(hdddir, dst_path)) | 326 | os.path.join(hdddir, dst_path)) |
413 | exec_cmd(install_cmd) | 327 | out = exec_cmd(install_cmd) |
328 | logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out) | ||
414 | 329 | ||
415 | try: | 330 | try: |
416 | if source_params['loader'] == 'grub-efi': | 331 | if source_params['loader'] == 'grub-efi': |
417 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, | 332 | shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, |
418 | "%s/grub.cfg" % cr_workdir) | 333 | "%s/grub.cfg" % cr_workdir) |
419 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: | 334 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: |
420 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) | 335 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) |
421 | exec_cmd(cp_cmd, True) | 336 | exec_cmd(cp_cmd, True) |
422 | shutil.move("%s/grub.cfg" % cr_workdir, | 337 | shutil.move("%s/grub.cfg" % cr_workdir, |
423 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) | 338 | "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) |
424 | elif source_params['loader'] == 'systemd-boot': | 339 | elif source_params['loader'] == 'systemd-boot': |
425 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: | 340 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: |
426 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) | 341 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) |
427 | exec_cmd(cp_cmd, True) | 342 | out = exec_cmd(cp_cmd, True) |
343 | logger.debug("systemd-boot files:\n%s" % out) | ||
428 | elif source_params['loader'] == 'uefi-kernel': | 344 | elif source_params['loader'] == 'uefi-kernel': |
429 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") | 345 | kernel = get_bitbake_var("KERNEL_IMAGETYPE") |
430 | if not kernel: | 346 | if not kernel: |
431 | raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target) | 347 | raise WicError("Empty KERNEL_IMAGETYPE") |
432 | target = get_bitbake_var("TARGET_SYS") | 348 | target = get_bitbake_var("TARGET_SYS") |
433 | if not target: | 349 | if not target: |
434 | raise WicError("Unknown arch (TARGET_SYS) %s\n" % target) | 350 | raise WicError("Empty TARGET_SYS") |
435 | 351 | ||
436 | if re.match("x86_64", target): | 352 | if re.match("x86_64", target): |
437 | kernel_efi_image = "bootx64.efi" | 353 | kernel_efi_image = "bootx64.efi" |
@@ -445,23 +361,33 @@ class BootimgEFIPlugin(SourcePlugin): | |||
445 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) | 361 | raise WicError("UEFI stub kernel is incompatible with target %s" % target) |
446 | 362 | ||
447 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: | 363 | for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: |
448 | cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) | 364 | cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) |
449 | exec_cmd(cp_cmd, True) | 365 | out = exec_cmd(cp_cmd, True) |
366 | logger.debug("uefi-kernel files:\n%s" % out) | ||
450 | else: | 367 | else: |
451 | raise WicError("unrecognized bootimg-efi loader: %s" % | 368 | raise WicError("unrecognized bootimg_efi loader: %s" % |
452 | source_params['loader']) | 369 | source_params['loader']) |
370 | |||
371 | # must have installed at least one EFI bootloader | ||
372 | out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi')) | ||
373 | logger.debug("Installed EFI loader files:\n%s" % out) | ||
374 | if not out: | ||
375 | raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.") | ||
376 | |||
453 | except KeyError: | 377 | except KeyError: |
454 | raise WicError("bootimg-efi requires a loader, none specified") | 378 | raise WicError("bootimg_efi requires a loader, none specified") |
455 | 379 | ||
456 | startup = os.path.join(kernel_dir, "startup.nsh") | 380 | startup = os.path.join(kernel_dir, "startup.nsh") |
457 | if os.path.exists(startup): | 381 | if os.path.exists(startup): |
458 | cp_cmd = "cp %s %s/" % (startup, hdddir) | 382 | cp_cmd = "cp -v -p %s %s/" % (startup, hdddir) |
459 | exec_cmd(cp_cmd, True) | 383 | out = exec_cmd(cp_cmd, True) |
384 | logger.debug("startup files:\n%s" % out) | ||
460 | 385 | ||
461 | for paths in part.include_path or []: | 386 | for paths in part.include_path or []: |
462 | for path in paths: | 387 | for path in paths: |
463 | cp_cmd = "cp -r %s %s/" % (path, hdddir) | 388 | cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir) |
464 | exec_cmd(cp_cmd, True) | 389 | exec_cmd(cp_cmd, True) |
390 | logger.debug("include_path files:\n%s" % out) | ||
465 | 391 | ||
466 | du_cmd = "du -bks %s" % hdddir | 392 | du_cmd = "du -bks %s" % hdddir |
467 | out = exec_cmd(du_cmd) | 393 | out = exec_cmd(du_cmd) |
@@ -489,12 +415,14 @@ class BootimgEFIPlugin(SourcePlugin): | |||
489 | 415 | ||
490 | label = part.label if part.label else "ESP" | 416 | label = part.label if part.label else "ESP" |
491 | 417 | ||
492 | dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ | 418 | dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \ |
493 | (label, part.fsuuid, bootimg, blocks) | 419 | (label, part.fsuuid, bootimg, blocks) |
494 | exec_native_cmd(dosfs_cmd, native_sysroot) | 420 | exec_native_cmd(dosfs_cmd, native_sysroot) |
421 | logger.debug("mkdosfs:\n%s" % (str(out))) | ||
495 | 422 | ||
496 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | 423 | mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir) |
497 | exec_native_cmd(mcopy_cmd, native_sysroot) | 424 | out = exec_native_cmd(mcopy_cmd, native_sysroot) |
425 | logger.debug("mcopy:\n%s" % (str(out))) | ||
498 | 426 | ||
499 | chmod_cmd = "chmod 644 %s" % bootimg | 427 | chmod_cmd = "chmod 644 %s" % bootimg |
500 | exec_cmd(chmod_cmd) | 428 | exec_cmd(chmod_cmd) |
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py index 1071d1af3f..cc121a78f0 100644 --- a/scripts/lib/wic/plugins/source/bootimg-partition.py +++ b/scripts/lib/wic/plugins/source/bootimg_partition.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-partition' source plugin class for | 7 | # This implements the 'bootimg_partition' source plugin class for |
8 | # 'wic'. The plugin creates an image of boot partition, copying over | 8 | # 'wic'. The plugin creates an image of boot partition, copying over |
9 | # files listed in IMAGE_BOOT_FILES bitbake variable. | 9 | # files listed in IMAGE_BOOT_FILES bitbake variable. |
10 | # | 10 | # |
@@ -16,7 +16,7 @@ import logging | |||
16 | import os | 16 | import os |
17 | import re | 17 | import re |
18 | 18 | ||
19 | from glob import glob | 19 | from oe.bootfiles import get_boot_files |
20 | 20 | ||
21 | from wic import WicError | 21 | from wic import WicError |
22 | from wic.engine import get_custom_config | 22 | from wic.engine import get_custom_config |
@@ -31,7 +31,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
31 | listed in IMAGE_BOOT_FILES bitbake variable. | 31 | listed in IMAGE_BOOT_FILES bitbake variable. |
32 | """ | 32 | """ |
33 | 33 | ||
34 | name = 'bootimg-partition' | 34 | name = 'bootimg_partition' |
35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' | 35 | image_boot_files_var_name = 'IMAGE_BOOT_FILES' |
36 | 36 | ||
37 | @classmethod | 37 | @classmethod |
@@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin): | |||
66 | 66 | ||
67 | logger.debug('Boot files: %s', boot_files) | 67 | logger.debug('Boot files: %s', boot_files) |
68 | 68 | ||
69 | # list of tuples (src_name, dst_name) | 69 | cls.install_task = get_boot_files(kernel_dir, boot_files) |
70 | deploy_files = [] | ||
71 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
72 | if ';' in src_entry: | ||
73 | dst_entry = tuple(src_entry.split(';')) | ||
74 | if not dst_entry[0] or not dst_entry[1]: | ||
75 | raise WicError('Malformed boot file entry: %s' % src_entry) | ||
76 | else: | ||
77 | dst_entry = (src_entry, src_entry) | ||
78 | |||
79 | logger.debug('Destination entry: %r', dst_entry) | ||
80 | deploy_files.append(dst_entry) | ||
81 | |||
82 | cls.install_task = []; | ||
83 | for deploy_entry in deploy_files: | ||
84 | src, dst = deploy_entry | ||
85 | if '*' in src: | ||
86 | # by default install files under their basename | ||
87 | entry_name_fn = os.path.basename | ||
88 | if dst != src: | ||
89 | # unless a target name was given, then treat name | ||
90 | # as a directory and append a basename | ||
91 | entry_name_fn = lambda name: \ | ||
92 | os.path.join(dst, | ||
93 | os.path.basename(name)) | ||
94 | |||
95 | srcs = glob(os.path.join(kernel_dir, src)) | ||
96 | |||
97 | logger.debug('Globbed sources: %s', ', '.join(srcs)) | ||
98 | for entry in srcs: | ||
99 | src = os.path.relpath(entry, kernel_dir) | ||
100 | entry_dst_name = entry_name_fn(entry) | ||
101 | cls.install_task.append((src, entry_dst_name)) | ||
102 | else: | ||
103 | cls.install_task.append((src, dst)) | ||
104 | |||
105 | if source_params.get('loader') != "u-boot": | 70 | if source_params.get('loader') != "u-boot": |
106 | return | 71 | return |
107 | 72 | ||
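The IMAGE_BOOT_FILES parsing deleted above now lives in oe.bootfiles.get_boot_files(), but the accepted syntax is unchanged: plain names, src;dst renames, and globs optionally installed under a target directory. A hypothetical value exercising all three forms (file names are assumptions):

    IMAGE_BOOT_FILES = "u-boot.bin zImage;kernel boot/*.dtb;dtbs/"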
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py index a207a83530..21f41e00bb 100644 --- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py +++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'bootimg-pcbios' source plugin class for 'wic' | 7 | # This implements the 'bootimg_pcbios' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> | 10 | # Tom Zanussi <tom.zanussi (at] linux.intel.com> |
@@ -27,7 +27,7 @@ class BootimgPcbiosPlugin(SourcePlugin): | |||
27 | Create MBR boot partition and install syslinux on it. | 27 | Create MBR boot partition and install syslinux on it. |
28 | """ | 28 | """ |
29 | 29 | ||
30 | name = 'bootimg-pcbios' | 30 | name = 'bootimg_pcbios' |
31 | 31 | ||
32 | @classmethod | 32 | @classmethod |
33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): | 33 | def _get_bootimg_dir(cls, bootimg_dir, dirname): |
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py index 607356ad13..5d42eb5d3e 100644 --- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py +++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | # DESCRIPTION | 6 | # DESCRIPTION |
7 | # This implements the 'isoimage-isohybrid' source plugin class for 'wic' | 7 | # This implements the 'isoimage_isohybrid' source plugin class for 'wic' |
8 | # | 8 | # |
9 | # AUTHORS | 9 | # AUTHORS |
10 | # Mihaly Varga <mihaly.varga (at] ni.com> | 10 | # Mihaly Varga <mihaly.varga (at] ni.com> |
@@ -35,7 +35,7 @@ class IsoImagePlugin(SourcePlugin): | |||
35 | bootloader files. | 35 | bootloader files. |
36 | 36 | ||
37 | Example kickstart file: | 37 | Example kickstart file: |
38 | part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ | 38 | part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\ |
39 | image_name= IsoImage" --ondisk cd --label LIVECD | 39 | image_name= IsoImage" --ondisk cd --label LIVECD |
40 | bootloader --timeout=10 --append=" " | 40 | bootloader --timeout=10 --append=" " |
41 | 41 | ||
@@ -45,7 +45,7 @@ class IsoImagePlugin(SourcePlugin): | |||
45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso | 45 | extension added by direct imager plugin) and a file named IsoImage-cd.iso |
46 | """ | 46 | """ |
47 | 47 | ||
48 | name = 'isoimage-isohybrid' | 48 | name = 'isoimage_isohybrid' |
49 | 49 | ||
50 | @classmethod | 50 | @classmethod |
51 | def do_configure_syslinux(cls, creator, cr_workdir): | 51 | def do_configure_syslinux(cls, creator, cr_workdir): |
@@ -340,10 +340,10 @@ class IsoImagePlugin(SourcePlugin): | |||
340 | cls.do_configure_grubefi(part, creator, target_dir) | 340 | cls.do_configure_grubefi(part, creator, target_dir) |
341 | 341 | ||
342 | else: | 342 | else: |
343 | raise WicError("unrecognized bootimg-efi loader: %s" % | 343 | raise WicError("unrecognized bootimg_efi loader: %s" % |
344 | source_params['loader']) | 344 | source_params['loader']) |
345 | except KeyError: | 345 | except KeyError: |
346 | raise WicError("bootimg-efi requires a loader, none specified") | 346 | raise WicError("bootimg_efi requires a loader, none specified") |
347 | 347 | ||
348 | # Create efi.img that contains bootloader files for EFI booting | 348 | # Create efi.img that contains bootloader files for EFI booting |
349 | # if ISODIR didn't exist or didn't contain it | 349 | # if ISODIR didn't exist or didn't contain it |
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py index e29f3a4c2f..06fce06bb1 100644 --- a/scripts/lib/wic/plugins/source/rootfs.py +++ b/scripts/lib/wic/plugins/source/rootfs.py | |||
@@ -41,9 +41,9 @@ class RootfsPlugin(SourcePlugin): | |||
41 | # Disallow climbing outside of parent directory using '..', | 41 | # Disallow climbing outside of parent directory using '..', |
42 | # because doing so could be quite disastrous (we will delete the | 42 | # because doing so could be quite disastrous (we will delete the |
43 | # directory, or modify a directory outside OpenEmbedded). | 43 | # directory, or modify a directory outside OpenEmbedded). |
44 | full_path = os.path.realpath(os.path.join(rootfs_dir, path)) | 44 | full_path = os.path.abspath(os.path.join(rootfs_dir, path)) |
45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): | 45 | if not full_path.startswith(os.path.realpath(rootfs_dir)): |
46 | logger.error("%s: Must point inside the rootfs:" % (cmd, path)) | 46 | logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) |
47 | sys.exit(1) | 47 | sys.exit(1) |
48 | 48 | ||
49 | return full_path | 49 | return full_path |
diff --git a/scripts/lz4c b/scripts/lz4c new file mode 100755 index 0000000000..466fc349e0 --- /dev/null +++ b/scripts/lz4c | |||
@@ -0,0 +1,26 @@ | |||
1 | #!/usr/bin/env bash | ||
2 | |||
3 | # Wrapper to intercept legacy lz4c arguments and convert to lz4. | ||
4 | args=() | ||
5 | while [ $# -ne 0 ]; do | ||
6 | case ${1} in | ||
7 | -c0) | ||
8 | args+=(-0) | ||
9 | ;; | ||
10 | -c1) | ||
11 | args+=(-9) | ||
12 | ;; | ||
13 | -c2|-hc) | ||
14 | args+=(-12) | ||
15 | ;; | ||
16 | -y) | ||
17 | args+=(--force) | ||
18 | ;; | ||
19 | *) | ||
20 | args+=("${1}") | ||
21 | ;; | ||
22 | esac | ||
23 | shift | ||
24 | done | ||
25 | |||
26 | exec lz4 "${args[@]}" | ||
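For reference, the flag translation performed by this wrapper can be summarised as a small lookup table; expressed as a Python sketch (the sample file names are made up):

    LEGACY_TO_LZ4 = {'-c0': '-0', '-c1': '-9', '-c2': '-12', '-hc': '-12', '-y': '--force'}

    def translate(argv):
        # unknown arguments (input/output paths, other flags) pass through unchanged
        return [LEGACY_TO_LZ4.get(arg, arg) for arg in argv]

    # translate(['-c1', '-y', 'rootfs.cpio', 'rootfs.cpio.lz4'])
    # -> ['-9', '--force', 'rootfs.cpio', 'rootfs.cpio.lz4'], which the wrapper then passes to lz4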
diff --git a/scripts/makefile-getvar b/scripts/makefile-getvar new file mode 100755 index 0000000000..4a07055e68 --- /dev/null +++ b/scripts/makefile-getvar | |||
@@ -0,0 +1,24 @@ | |||
1 | #! /bin/sh | ||
2 | |||
3 | # Get a variable's value from a makefile: | ||
4 | # | ||
5 | # $ makefile-getvar Makefile VARIABLE VARIABLE ... | ||
6 | # | ||
7 | # If multiple variables are specified, they will be printed one per line. | ||
8 | # | ||
9 | # SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com> | ||
10 | # SPDX-License-Identifier: GPL-2.0-only | ||
11 | |||
12 | set -eu | ||
13 | |||
14 | MAKEFILE=$1 | ||
15 | shift | ||
16 | |||
17 | for VARIABLE in $*; do | ||
18 | make -f - $VARIABLE.var <<EOF | ||
19 | include $MAKEFILE | ||
20 | |||
21 | %.var: | ||
22 | @echo \$(\$*) | ||
23 | EOF | ||
24 | done | ||
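The trick here is that make itself expands the variable: the script pipes a generated makefile that includes the target file and defines a pattern rule printing $($*). A rough Python equivalent of the same query, assuming make is on PATH and using a hypothetical makefile path:

    import subprocess

    def makefile_getvar(makefile, variable):
        # include the real makefile, then ask make to print $(VARIABLE) via a %.var rule
        rules = "include {}\n\n%.var:\n\t@echo $($*)\n".format(makefile)
        result = subprocess.run(["make", "-f", "-", variable + ".var"],
                                input=rules, capture_output=True, text=True, check=True)
        return result.stdout.strip()

    # e.g. makefile_getvar("Makefile", "VERSION") returns the expanded value of VERSION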
diff --git a/scripts/oe-build-perf-report b/scripts/oe-build-perf-report index 7812ea4540..a36f3c1bca 100755 --- a/scripts/oe-build-perf-report +++ b/scripts/oe-build-perf-report | |||
@@ -336,8 +336,16 @@ def print_html_report(data, id_comp, buildstats): | |||
336 | test_i = test_data['tests'][test] | 336 | test_i = test_data['tests'][test] |
337 | meas_i = test_i['measurements'][meas] | 337 | meas_i = test_i['measurements'][meas] |
338 | commit_num = get_data_item(meta, 'layers.meta.commit_count') | 338 | commit_num = get_data_item(meta, 'layers.meta.commit_count') |
339 | samples.append(measurement_stats(meas_i)) | 339 | commit = get_data_item(meta, 'layers.meta.commit') |
340 | # Add start_time for both test measurement types of sysres and disk usage | ||
341 | try: | ||
342 | # Use the commit_time if available, falling back to start_time | ||
343 | start_time = get_data_item(meta, 'layers.meta.commit_time') | ||
344 | except KeyError: | ||
345 | start_time = test_i['start_time'][0] | ||
346 | samples.append(measurement_stats(meas_i, '', start_time)) | ||
340 | samples[-1]['commit_num'] = commit_num | 347 | samples[-1]['commit_num'] = commit_num |
348 | samples[-1]['commit'] = commit | ||
341 | 349 | ||
342 | absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) | 350 | absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) |
343 | reldiff = absdiff * 100 / samples[id_comp]['mean'] | 351 | reldiff = absdiff * 100 / samples[id_comp]['mean'] |
@@ -473,7 +481,7 @@ Examine build performance test results from a Git repository""" | |||
473 | group.add_argument('--branch', '-B', default='master', help="Branch to find commit in") | 481 | group.add_argument('--branch', '-B', default='master', help="Branch to find commit in") |
474 | group.add_argument('--branch2', help="Branch to find comparison revisions in") | 482 | group.add_argument('--branch2', help="Branch to find comparison revisions in") |
475 | group.add_argument('--machine', default='qemux86') | 483 | group.add_argument('--machine', default='qemux86') |
476 | group.add_argument('--history-length', default=25, type=int, | 484 | group.add_argument('--history-length', default=300, type=int, |
477 | help="Number of tested revisions to plot in html report") | 485 | help="Number of tested revisions to plot in html report") |
478 | group.add_argument('--commit', | 486 | group.add_argument('--commit', |
479 | help="Revision to search for") | 487 | help="Revision to search for") |
diff --git a/scripts/oe-debuginfod b/scripts/oe-debuginfod index b525310225..5e70d37b8b 100755 --- a/scripts/oe-debuginfod +++ b/scripts/oe-debuginfod | |||
@@ -15,14 +15,29 @@ scriptpath.add_bitbake_lib_path() | |||
15 | 15 | ||
16 | import bb.tinfoil | 16 | import bb.tinfoil |
17 | import subprocess | 17 | import subprocess |
18 | import argparse | ||
18 | 19 | ||
19 | if __name__ == "__main__": | 20 | if __name__ == "__main__": |
21 | p = argparse.ArgumentParser() | ||
22 | p.add_argument("-d", action='store_true', \ | ||
23 | help="store debuginfod files in project sub-directory") | ||
24 | |||
25 | args = p.parse_args() | ||
26 | |||
20 | with bb.tinfoil.Tinfoil() as tinfoil: | 27 | with bb.tinfoil.Tinfoil() as tinfoil: |
21 | tinfoil.prepare(config_only=True) | 28 | tinfoil.prepare(config_only=True) |
22 | package_classes_var = "DEPLOY_DIR_" + tinfoil.config_data.getVar("PACKAGE_CLASSES").split()[0].replace("package_", "").upper() | 29 | package_classes_var = "DEPLOY_DIR_" + tinfoil.config_data.getVar("PACKAGE_CLASSES").split()[0].replace("package_", "").upper() |
23 | feed_dir = tinfoil.config_data.getVar(package_classes_var, expand=True) | 30 | feed_dir = tinfoil.config_data.getVar(package_classes_var, expand=True) |
24 | 31 | ||
32 | opts = [ '--verbose', '-R', '-U', feed_dir ] | ||
33 | |||
34 | if args.d: | ||
35 | fdir = os.path.join(os.getcwd(), 'oedid-files') | ||
36 | os.makedirs(fdir, exist_ok=True) | ||
37 | opts += [ '-d', os.path.join(fdir, 'did.sqlite') ] | ||
38 | |||
25 | subprocess.call(['bitbake', '-c', 'addto_recipe_sysroot', 'elfutils-native']) | 39 | subprocess.call(['bitbake', '-c', 'addto_recipe_sysroot', 'elfutils-native']) |
26 | 40 | ||
27 | subprocess.call(['oe-run-native', 'elfutils-native', 'debuginfod', '--verbose', '-R', '-U', feed_dir]) | 41 | subprocess.call(['oe-run-native', 'elfutils-native', 'debuginfod'] + opts) |
42 | # we should not get here | ||
28 | print("\nTo use the debuginfod server please ensure that this variable PACKAGECONFIG:pn-elfutils-native = \"debuginfod libdebuginfod\" is set in the local.conf") | 43 | print("\nTo use the debuginfod server please ensure that this variable PACKAGECONFIG:pn-elfutils-native = \"debuginfod libdebuginfod\" is set in the local.conf") |
diff --git a/scripts/oe-selftest b/scripts/oe-selftest index 18ac0f5869..afc48d9905 100755 --- a/scripts/oe-selftest +++ b/scripts/oe-selftest | |||
@@ -18,8 +18,6 @@ | |||
18 | 18 | ||
19 | import os | 19 | import os |
20 | import sys | 20 | import sys |
21 | import argparse | ||
22 | import logging | ||
23 | 21 | ||
24 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | 22 | scripts_path = os.path.dirname(os.path.realpath(__file__)) |
25 | lib_path = scripts_path + '/lib' | 23 | lib_path = scripts_path + '/lib' |
diff --git a/scripts/oe-setup-build b/scripts/oe-setup-build index 5364f2b481..49603d9fd1 100755 --- a/scripts/oe-setup-build +++ b/scripts/oe-setup-build | |||
@@ -18,8 +18,7 @@ def makebuildpath(topdir, template): | |||
18 | 18 | ||
19 | def discover_templates(layers_file): | 19 | def discover_templates(layers_file): |
20 | if not os.path.exists(layers_file): | 20 | if not os.path.exists(layers_file): |
21 | print("List of layers {} does not exist; were the layers set up using the setup-layers script?".format(layers_file)) | 21 | raise Exception("List of layers {} does not exist; were the layers set up using the setup-layers script or bitbake-setup tool?".format(layers_file)) |
22 | return None | ||
23 | 22 | ||
24 | templates = [] | 23 | templates = [] |
25 | layers_list = json.load(open(layers_file))["layers"] | 24 | layers_list = json.load(open(layers_file))["layers"] |
@@ -77,8 +76,7 @@ def find_template(template_name, templates): | |||
77 | for t in templates: | 76 | for t in templates: |
78 | if t["templatename"] == template_name: | 77 | if t["templatename"] == template_name: |
79 | return t | 78 | return t |
80 | print("Configuration {} is not one of {}, please try again.".format(tempalte_name, [t["templatename"] for t in templates])) | 79 | raise Exception("Configuration {} is not one of {}, please try again.".format(template_name, [t["templatename"] for t in templates])) |
81 | return None | ||
82 | 80 | ||
83 | def setup_build_env(args): | 81 | def setup_build_env(args): |
84 | templates = discover_templates(args.layerlist) | 82 | templates = discover_templates(args.layerlist) |
@@ -91,11 +89,20 @@ def setup_build_env(args): | |||
91 | builddir = args.b if args.b else template["buildpath"] | 89 | builddir = args.b if args.b else template["buildpath"] |
92 | no_shell = args.no_shell | 90 | no_shell = args.no_shell |
93 | coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) | 91 | coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) |
94 | cmd = "TEMPLATECONF={} . {} {}".format(template["templatepath"], os.path.join(coredir, 'oe-init-build-env'), builddir) | 92 | cmd_base = ". {} {}".format(os.path.join(coredir, 'oe-init-build-env'), os.path.abspath(builddir)) |
93 | |||
94 | initbuild = os.path.join(builddir, 'init-build-env') | ||
95 | if not os.path.exists(initbuild): | ||
96 | os.makedirs(builddir, exist_ok=True) | ||
97 | with open(initbuild, 'w') as f: | ||
98 | f.write(cmd_base) | ||
99 | print("\nRun '. {}' to initialize the build in a current shell session.\n".format(initbuild)) | ||
100 | |||
101 | cmd = "TEMPLATECONF={} {}".format(template["templatepath"], cmd_base) | ||
95 | if not no_shell: | 102 | if not no_shell: |
96 | cmd = cmd + " && {}".format(os.environ['SHELL']) | 103 | cmd = cmd + " && {}".format(os.environ.get('SHELL','bash')) |
97 | print("Running:", cmd) | 104 | print("Running:", cmd) |
98 | subprocess.run(cmd, shell=True, executable=os.environ['SHELL']) | 105 | subprocess.run(cmd, shell=True, executable=os.environ.get('SHELL','bash')) |
99 | 106 | ||
100 | parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.") | 107 | parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.") |
101 | parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers())) | 108 | parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers())) |
diff --git a/scripts/patchtest b/scripts/patchtest index 0be7062dc2..9218db232a 100755 --- a/scripts/patchtest +++ b/scripts/patchtest | |||
@@ -9,12 +9,12 @@ | |||
9 | # SPDX-License-Identifier: GPL-2.0-only | 9 | # SPDX-License-Identifier: GPL-2.0-only |
10 | # | 10 | # |
11 | 11 | ||
12 | import sys | 12 | import json |
13 | import os | ||
14 | import unittest | ||
15 | import logging | 13 | import logging |
14 | import os | ||
15 | import sys | ||
16 | import traceback | 16 | import traceback |
17 | import json | 17 | import unittest |
18 | 18 | ||
19 | # Include current path so test cases can see it | 19 | # Include current path so test cases can see it |
20 | sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) | 20 | sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) |
@@ -22,16 +22,17 @@ sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) | |||
22 | # Include patchtest library | 22 | # Include patchtest library |
23 | sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest')) | 23 | sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest')) |
24 | 24 | ||
25 | from data import PatchTestInput | 25 | from patchtest_parser import PatchtestParser |
26 | from repo import PatchTestRepo | 26 | from repo import PatchTestRepo |
27 | 27 | ||
28 | import utils | 28 | logger = logging.getLogger("patchtest") |
29 | logger = utils.logger_create('patchtest') | 29 | loggerhandler = logging.StreamHandler() |
30 | loggerhandler.setFormatter(logging.Formatter("%(message)s")) | ||
31 | logger.addHandler(loggerhandler) | ||
32 | logger.setLevel(logging.INFO) | ||
30 | info = logger.info | 33 | info = logger.info |
31 | error = logger.error | 34 | error = logger.error |
32 | 35 | ||
33 | import repo | ||
34 | |||
35 | def getResult(patch, mergepatch, logfile=None): | 36 | def getResult(patch, mergepatch, logfile=None): |
36 | 37 | ||
37 | class PatchTestResult(unittest.TextTestResult): | 38 | class PatchTestResult(unittest.TextTestResult): |
@@ -46,10 +47,10 @@ def getResult(patch, mergepatch, logfile=None): | |||
46 | def startTestRun(self): | 47 | def startTestRun(self): |
47 | # let's create the repo already, it can be used later on | 48 | # let's create the repo already, it can be used later on |
48 | repoargs = { | 49 | repoargs = { |
49 | 'repodir': PatchTestInput.repodir, | 50 | "repodir": PatchtestParser.repodir, |
50 | 'commit' : PatchTestInput.basecommit, | 51 | "commit": PatchtestParser.basecommit, |
51 | 'branch' : PatchTestInput.basebranch, | 52 | "branch": PatchtestParser.basebranch, |
52 | 'patch' : patch, | 53 | "patch": patch, |
53 | } | 54 | } |
54 | 55 | ||
55 | self.repo_error = False | 56 | self.repo_error = False |
@@ -57,7 +58,7 @@ def getResult(patch, mergepatch, logfile=None): | |||
57 | self.test_failure = False | 58 | self.test_failure = False |
58 | 59 | ||
59 | try: | 60 | try: |
60 | self.repo = PatchTestInput.repo = PatchTestRepo(**repoargs) | 61 | self.repo = PatchtestParser.repo = PatchTestRepo(**repoargs) |
61 | except: | 62 | except: |
62 | logger.error(traceback.print_exc()) | 63 | logger.error(traceback.print_exc()) |
63 | self.repo_error = True | 64 | self.repo_error = True |
@@ -128,7 +129,11 @@ def _runner(resultklass, prefix=None): | |||
128 | loader.testMethodPrefix = prefix | 129 | loader.testMethodPrefix = prefix |
129 | 130 | ||
130 | # create the suite with discovered tests and the corresponding runner | 131 | # create the suite with discovered tests and the corresponding runner |
131 | suite = loader.discover(start_dir=PatchTestInput.testdir, pattern=PatchTestInput.pattern, top_level_dir=PatchTestInput.topdir) | 132 | suite = loader.discover( |
133 | start_dir=PatchtestParser.testdir, | ||
134 | pattern=PatchtestParser.pattern, | ||
135 | top_level_dir=PatchtestParser.topdir, | ||
136 | ) | ||
132 | ntc = suite.countTestCases() | 137 | ntc = suite.countTestCases() |
133 | 138 | ||
134 | # if there are no test cases, just quit | 139 | # if there are no test cases, just quit |
@@ -160,24 +165,31 @@ def run(patch, logfile=None): | |||
160 | postmerge_resultklass = getResult(patch, True, logfile) | 165 | postmerge_resultklass = getResult(patch, True, logfile) |
161 | postmerge_result = _runner(postmerge_resultklass, 'test') | 166 | postmerge_result = _runner(postmerge_resultklass, 'test') |
162 | 167 | ||
163 | print('----------------------------------------------------------------------\n') | 168 | print_result_message(premerge_result, postmerge_result) |
164 | if premerge_result == 2 and postmerge_result == 2: | ||
165 | logger.error('patchtest: No test cases found - did you specify the correct suite directory?') | ||
166 | if premerge_result == 1 or postmerge_result == 1: | ||
167 | logger.error('WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance') | ||
168 | else: | ||
169 | logger.info('OK: patchtest: All patchtests passed') | ||
170 | print('----------------------------------------------------------------------\n') | ||
171 | return premerge_result or postmerge_result | 169 | return premerge_result or postmerge_result |
172 | 170 | ||
171 | def print_result_message(preresult, postresult): | ||
172 | print("----------------------------------------------------------------------\n") | ||
173 | if preresult == 2 and postresult == 2: | ||
174 | logger.error( | ||
175 | "patchtest: No test cases found - did you specify the correct suite directory?" | ||
176 | ) | ||
177 | if preresult == 1 or postresult == 1: | ||
178 | logger.error( | ||
179 | "WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance" | ||
180 | ) | ||
181 | else: | ||
182 | logger.info("OK: patchtest: All patchtests passed") | ||
183 | print("----------------------------------------------------------------------\n") | ||
184 | |||
173 | def main(): | 185 | def main(): |
174 | tmp_patch = False | 186 | tmp_patch = False |
175 | patch_path = PatchTestInput.patch_path | 187 | patch_path = PatchtestParser.patch_path |
176 | log_results = PatchTestInput.log_results | 188 | log_results = PatchtestParser.log_results |
177 | log_path = None | 189 | log_path = None |
178 | patch_list = None | 190 | patch_list = None |
179 | 191 | ||
180 | git_status = os.popen("(cd %s && git status)" % PatchTestInput.repodir).read() | 192 | git_status = os.popen("(cd %s && git status)" % PatchtestParser.repodir).read() |
181 | status_matches = ["Changes not staged for commit", "Changes to be committed"] | 193 | status_matches = ["Changes not staged for commit", "Changes to be committed"] |
182 | if any([match in git_status for match in status_matches]): | 194 | if any([match in git_status for match in status_matches]): |
183 | logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest") | 195 | logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest") |
@@ -212,16 +224,16 @@ def main(): | |||
212 | if __name__ == '__main__': | 224 | if __name__ == '__main__': |
213 | ret = 1 | 225 | ret = 1 |
214 | 226 | ||
215 | # Parse the command line arguments and store it on the PatchTestInput namespace | 227 | # Parse the command line arguments and store it on the PatchtestParser namespace |
216 | PatchTestInput.set_namespace() | 228 | PatchtestParser.set_namespace() |
217 | 229 | ||
218 | # set debugging level | 230 | # set debugging level |
219 | if PatchTestInput.debug: | 231 | if PatchtestParser.debug: |
220 | logger.setLevel(logging.DEBUG) | 232 | logger.setLevel(logging.DEBUG) |
221 | 233 | ||
222 | # if topdir not define, default it to testdir | 234 | # if topdir not define, default it to testdir |
223 | if not PatchTestInput.topdir: | 235 | if not PatchtestParser.topdir: |
224 | PatchTestInput.topdir = PatchTestInput.testdir | 236 | PatchtestParser.topdir = PatchtestParser.testdir |
225 | 237 | ||
226 | try: | 238 | try: |
227 | ret = main() | 239 | ret = main() |
diff --git a/scripts/patchtest.README b/scripts/patchtest.README index 76b5fcdb6d..3c1ee1af1d 100644 --- a/scripts/patchtest.README +++ b/scripts/patchtest.README | |||
@@ -3,40 +3,35 @@ | |||
3 | ## Introduction | 3 | ## Introduction |
4 | 4 | ||
5 | Patchtest is a test framework for community patches based on the standard | 5 | Patchtest is a test framework for community patches based on the standard |
6 | unittest python module. As input, it needs tree elements to work properly: | 6 | unittest python module. As input, it needs three elements to work properly: |
7 | a patch in mbox format (either created with `git format-patch` or fetched | 7 | |
8 | from 'patchwork'), a test suite and a target repository. | 8 | - a patch in mbox format (either created with `git format-patch` or fetched |
9 | from 'patchwork') | ||
10 | - a test suite | ||
11 | - a target repository | ||
9 | 12 | ||
10 | The first test suite intended to be used with patchtest is found in the | 13 | The first test suite intended to be used with patchtest is found in the |
11 | openembedded-core repository [1] targeted for patches that get into the | 14 | openembedded-core repository [1], targeted for patches that get into the |
12 | openembedded-core mailing list [2]. This suite is also intended as a | 15 | openembedded-core mailing list [2]. This suite is also intended as a |
13 | baseline for development of similar suites for other layers as needed. | 16 | baseline for development of similar suites for other layers as needed. |
14 | 17 | ||
15 | Patchtest can either run on a host or a guest machine, depending on which | 18 | Patchtest can either run on a host or a guest machine, depending on |
16 | environment the execution needs to be done. If you plan to test your own patches | 19 | which environment you prefer. If you plan to test your own patches (a |
17 | (a good practice before these are sent to the mailing list), the easiest way is | 20 | good practice before these are sent to the mailing list), the easiest |
18 | to install and execute on your local host; on the other hand, if automatic | 21 | way is to install and execute on your local host; on the other hand, if |
19 | testing is intended, the guest method is strongly recommended. The guest | 22 | automatic testing is intended, the guest method is strongly recommended. |
20 | method requires the use of the patchtest layer, in addition to the tools | 23 | The guest method requires the use of the patchtest layer, in addition to |
21 | available in oe-core: https://git.yoctoproject.org/patchtest/ | 24 | the tools available in oe-core: https://git.yoctoproject.org/patchtest/ |
22 | 25 | ||
23 | ## Installation | 26 | ## Installation |
24 | 27 | ||
25 | As a tool for use with the Yocto Project, the [quick start guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html) | 28 | As a tool for use with the Yocto Project, the [quick start |
26 | contains the necessary prerequisites for a basic project. In addition, | 29 | guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html) |
27 | patchtest relies on the following Python modules: | 30 | contains the necessary prerequisites. In addition, patchtest relies on |
28 | 31 | several Python modules for parsing and analysis, which can be installed | |
29 | - boto3 (for sending automated results emails only) | 32 | by running `pip install -r meta/lib/patchtest/requirements.txt`. Note |
30 | - git-pw>=2.5.0 | 33 | that git-pw is not automatically added to the user's PATH; by default, |
31 | - jinja2 | 34 | it is installed at ~/.local/bin/git-pw. |
32 | - pylint | ||
33 | - pyparsing>=3.0.9 | ||
34 | - unidiff | ||
35 | |||
36 | These can be installed by running `pip install -r | ||
37 | meta/lib/patchtest/requirements.txt`. Note that git-pw is not | ||
38 | automatically added to the user's PATH; by default, it is installed at | ||
39 | ~/.local/bin/git-pw. | ||
40 | 35 | ||
41 | For git-pw (and therefore scripts such as patchtest-get-series) to work, you need | 36 | For git-pw (and therefore scripts such as patchtest-get-series) to work, you need |
42 | to provide a Patchwork instance in your user's .gitconfig, like so (the project | 37 | to provide a Patchwork instance in your user's .gitconfig, like so (the project |
@@ -74,7 +69,7 @@ the target project, but these parameters can be configured using the `--limit`, | |||
74 | To run patchtest on the host, do the following: | 69 | To run patchtest on the host, do the following: |
75 | 70 | ||
76 | 1. In openembedded-core/poky, do `source oe-init-build-env` | 71 | 1. In openembedded-core/poky, do `source oe-init-build-env` |
77 | 2. Generate patch files from the target repository by doing `git-format patch -N`, | 72 | 2. Generate patch files from the target repository by doing `git format-patch -N`, |
78 | where N is the number of patches starting at HEAD, or by using git-pw | 73 | where N is the number of patches starting at HEAD, or by using git-pw |
79 | or patchtest-get-series | 74 | or patchtest-get-series |
80 | 3. Run patchtest on a patch file by doing the following: | 75 | 3. Run patchtest on a patch file by doing the following: |
@@ -123,7 +118,7 @@ The general flow of guest mode is: | |||
123 | -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m | 118 | -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m |
124 | 2048"` | 119 | 2048"` |
125 | 120 | ||
126 | Patchtest runs as an initscript for the core-image-patchtest image and | 121 | Patchtest is run by an initscript for the core-image-patchtest image and |
127 | shuts down after completion, so there is no input required from a user | 122 | shuts down after completion, so there is no input required from a user |
128 | during operation. Unlike in host mode, the guest is designed to | 123 | during operation. Unlike in host mode, the guest is designed to |
129 | automatically generate test result files, in the same directory as the | 124 | automatically generate test result files, in the same directory as the |
@@ -131,6 +126,17 @@ targeted patch files but with .testresult as an extension. These contain | |||
131 | the entire output of the patchtest run for each respective pass, | 126 | the entire output of the patchtest run for each respective pass, |
132 | including the PASS, FAIL, and SKIP indicators for each test run. | 127 | including the PASS, FAIL, and SKIP indicators for each test run. |
133 | 128 | ||
129 | ### Running Patchtest Selftests | ||
130 | |||
131 | Patchtest also includes selftests, which are currently in the form of | ||
132 | several contrived patch files and a runner script found in | ||
133 | `meta/lib/patchtest/selftest/`. In order to run these, the | ||
134 | `meta-selftest` layer must be added to bblayers.conf. It is also | ||
135 | recommended to set BB_SERVER_TIMEOUT (and thus enable memory-resident | ||
136 | bitbake) in local.conf to reduce runtime, as the bitbake startup process | ||
137 | will otherwise add to it significantly when restarted for each test | ||
138 | patch. | ||
139 | |||
134 | ## Contributing | 140 | ## Contributing |
135 | 141 | ||
136 | The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions, | 142 | The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions, |
diff --git a/scripts/pull-sdpx-licenses.py b/scripts/pull-sdpx-licenses.py new file mode 100755 index 0000000000..597a62133f --- /dev/null +++ b/scripts/pull-sdpx-licenses.py | |||
@@ -0,0 +1,101 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | |||
7 | import argparse | ||
8 | import json | ||
9 | import sys | ||
10 | import urllib.request | ||
11 | from pathlib import Path | ||
12 | |||
13 | TOP_DIR = Path(__file__).parent.parent | ||
14 | |||
15 | |||
16 | def main(): | ||
17 | parser = argparse.ArgumentParser( | ||
18 | description="Update SPDX License files from upstream" | ||
19 | ) | ||
20 | parser.add_argument( | ||
21 | "-v", | ||
22 | "--version", | ||
23 | metavar="MAJOR.MINOR[.MICRO]", | ||
24 | help="Pull specific version of License list instead of latest", | ||
25 | ) | ||
26 | parser.add_argument( | ||
27 | "--overwrite", | ||
28 | action="store_true", | ||
29 | help="Update existing license file text with upstream text", | ||
30 | ) | ||
31 | parser.add_argument( | ||
32 | "--deprecated", | ||
33 | action="store_true", | ||
34 | help="Update deprecated licenses", | ||
35 | ) | ||
36 | parser.add_argument( | ||
37 | "--dest", | ||
38 | type=Path, | ||
39 | default=TOP_DIR / "meta" / "files" / "common-licenses", | ||
40 | help="Write licenses to directory DEST. Default is %(default)s", | ||
41 | ) | ||
42 | |||
43 | args = parser.parse_args() | ||
44 | |||
45 | if args.version: | ||
46 | version = f"v{args.version}" | ||
47 | else: | ||
48 | # Fetch the latest release | ||
49 | req = urllib.request.Request( | ||
50 | "https://api.github.com/repos/spdx/license-list-data/releases/latest" | ||
51 | ) | ||
52 | req.add_header("X-GitHub-Api-Version", "2022-11-28") | ||
53 | req.add_header("Accept", "application/vnd.github+json") | ||
54 | with urllib.request.urlopen(req) as response: | ||
55 | data = json.load(response) | ||
56 | version = data["tag_name"] | ||
57 | |||
58 | print(f"Pulling SPDX license list version {version}") | ||
59 | req = urllib.request.Request( | ||
60 | f"https://raw.githubusercontent.com/spdx/license-list-data/{version}/json/licenses.json" | ||
61 | ) | ||
62 | with urllib.request.urlopen(req) as response: | ||
63 | spdx_licenses = json.load(response) | ||
64 | |||
65 | with (TOP_DIR / "meta" / "files" / "spdx-licenses.json").open("w") as f: | ||
66 | json.dump(spdx_licenses, f, sort_keys=True, indent=2) | ||
67 | |||
68 | total_count = len(spdx_licenses["licenses"]) | ||
69 | updated = 0 | ||
70 | for idx, lic in enumerate(spdx_licenses["licenses"]): | ||
71 | lic_id = lic["licenseId"] | ||
72 | |||
73 | print(f"[{idx + 1} of {total_count}] ", end="") | ||
74 | |||
75 | dest_license_file = args.dest / lic_id | ||
76 | if dest_license_file.is_file() and not args.overwrite: | ||
77 | print(f"Skipping {lic_id} since it already exists") | ||
78 | continue | ||
79 | |||
80 | print(f"Fetching {lic_id}... ", end="", flush=True) | ||
81 | |||
82 | req = urllib.request.Request(lic["detailsUrl"]) | ||
83 | with urllib.request.urlopen(req) as response: | ||
84 | lic_data = json.load(response) | ||
85 | |||
86 | if lic_data["isDeprecatedLicenseId"] and not args.deprecated: | ||
87 | print("Skipping (deprecated)") | ||
88 | continue | ||
89 | |||
90 | with dest_license_file.open("w") as f: | ||
91 | f.write(lic_data["licenseText"]) | ||
92 | updated += 1 | ||
93 | print("done") | ||
94 | |||
95 | print(f"Updated {updated} licenses") | ||
96 | |||
97 | return 0 | ||
98 | |||
99 | |||
100 | if __name__ == "__main__": | ||
101 | sys.exit(main()) | ||
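The script only depends on a handful of fields from the upstream JSON; a minimal illustration of the shape it expects (the values below are invented):

    # licenses.json: one entry per license in the SPDX list
    spdx_licenses = {
        "licenses": [
            {
                "licenseId": "MIT",
                "detailsUrl": "https://spdx.org/licenses/MIT.json",
            },
        ],
    }
    # each detailsUrl document in turn provides "isDeprecatedLicenseId" and
    # "licenseText"; the latter is written to meta/files/common-licenses/<licenseId>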
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py index c6e67833ab..16739a0fa1 100644 --- a/scripts/pybootchartgui/pybootchartgui/draw.py +++ b/scripts/pybootchartgui/pybootchartgui/draw.py | |||
@@ -69,6 +69,11 @@ CPU_COLOR = (0.40, 0.55, 0.70, 1.0) | |||
69 | IO_COLOR = (0.76, 0.48, 0.48, 0.5) | 69 | IO_COLOR = (0.76, 0.48, 0.48, 0.5) |
70 | # Disk throughput color. | 70 | # Disk throughput color. |
71 | DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0) | 71 | DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0) |
72 | |||
73 | BYTES_RECEIVED_COLOR = (0.0, 0.0, 1.0, 1.0) | ||
74 | BYTES_TRANSMITTED_COLOR = (1.0, 0.0, 0.0, 1.0) | ||
75 | BYTES_RECEIVE_DIFF_COLOR = (0.0, 0.0, 1.0, 0.3) | ||
76 | BYTES_TRANSMIT_DIFF_COLOR = (1.0, 0.0, 0.0, 0.3) | ||
72 | # CPU load chart color. | 77 | # CPU load chart color. |
73 | FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0) | 78 | FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0) |
74 | # Mem cached color | 79 | # Mem cached color |
@@ -437,6 +442,49 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w): | |||
437 | 442 | ||
438 | curr_y = curr_y + 30 + bar_h | 443 | curr_y = curr_y + 30 + bar_h |
439 | 444 | ||
445 | if trace.net_stats: | ||
446 | for iface, samples in trace.net_stats.items(): | ||
447 | max_received_sample = max(samples, key=lambda s: s.received_bytes) | ||
448 | max_transmitted_sample = max(samples, key=lambda s: s.transmitted_bytes) | ||
449 | max_receive_diff_sample = max(samples, key=lambda s: s.receive_diff) | ||
450 | max_transmit_diff_sample = max(samples, key=lambda s: s.transmit_diff) | ||
451 | |||
452 | draw_text(ctx, "Iface: %s" % (iface), TEXT_COLOR, off_x, curr_y+20) | ||
453 | draw_legend_line(ctx, "Bytes received (max %d)" % (max_received_sample.received_bytes), | ||
454 | BYTES_RECEIVED_COLOR, off_x+150, curr_y+20, leg_s) | ||
455 | draw_legend_line(ctx, "Bytes transmitted (max %d)" % (max_transmitted_sample.transmitted_bytes), | ||
456 | BYTES_TRANSMITTED_COLOR, off_x+400, curr_y+20, leg_s) | ||
457 | draw_legend_box(ctx, "Bytes receive diff (max %d)" % (max_receive_diff_sample.receive_diff), | ||
458 | BYTES_RECEIVE_DIFF_COLOR, off_x+650, curr_y+20, leg_s) | ||
459 | draw_legend_box(ctx, "Bytes transmit diff (max %d)" % (max_transmit_diff_sample.transmit_diff), | ||
460 | BYTES_TRANSMIT_DIFF_COLOR, off_x+900, curr_y+20, leg_s) | ||
461 | |||
462 | |||
463 | chart_rect = (off_x, curr_y + 30, w, bar_h) | ||
464 | if clip_visible(clip, chart_rect): | ||
465 | draw_box_ticks(ctx, chart_rect, sec_w) | ||
466 | draw_annotations(ctx, proc_tree, trace.times, chart_rect) | ||
467 | |||
468 | if clip_visible (clip, chart_rect): | ||
469 | draw_chart (ctx, BYTES_RECEIVED_COLOR, False, chart_rect, \ | ||
470 | [(sample.time, sample.received_bytes) for sample in samples], \ | ||
471 | proc_tree, None) | ||
472 | |||
473 | draw_chart (ctx, BYTES_TRANSMITTED_COLOR, False, chart_rect, \ | ||
474 | [(sample.time, sample.transmitted_bytes) for sample in samples], \ | ||
475 | proc_tree, None) | ||
476 | |||
477 | if clip_visible (clip, chart_rect): | ||
478 | draw_chart (ctx, BYTES_RECEIVE_DIFF_COLOR, True, chart_rect, \ | ||
479 | [(sample.time, sample.receive_diff) for sample in samples], \ | ||
480 | proc_tree, None) | ||
481 | |||
482 | draw_chart (ctx, BYTES_TRANSMIT_DIFF_COLOR, True, chart_rect, \ | ||
483 | [(sample.time, sample.transmit_diff) for sample in samples], \ | ||
484 | proc_tree, None) | ||
485 | |||
486 | curr_y = curr_y + 30 + bar_h | ||
487 | |||
440 | # render CPU pressure chart | 488 | # render CPU pressure chart |
441 | if trace.cpu_pressure: | 489 | if trace.cpu_pressure: |
442 | max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10) | 490 | max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10) |
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py index 63a53b6b88..72a54c6ba5 100644 --- a/scripts/pybootchartgui/pybootchartgui/parsing.py +++ b/scripts/pybootchartgui/pybootchartgui/parsing.py | |||
@@ -48,6 +48,7 @@ class Trace: | |||
48 | self.filename = None | 48 | self.filename = None |
49 | self.parent_map = None | 49 | self.parent_map = None |
50 | self.mem_stats = [] | 50 | self.mem_stats = [] |
51 | self.net_stats = [] | ||
51 | self.monitor_disk = None | 52 | self.monitor_disk = None |
52 | self.cpu_pressure = [] | 53 | self.cpu_pressure = [] |
53 | self.io_pressure = [] | 54 | self.io_pressure = [] |
@@ -457,7 +458,7 @@ def _parse_proc_disk_stat_log(file): | |||
457 | not sda1, sda2 etc. The format of relevant lines should be: | 458 | not sda1, sda2 etc. The format of relevant lines should be: |
458 | {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq} | 459 | {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq} |
459 | """ | 460 | """ |
460 | disk_regex_re = re.compile ('^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') | 461 | disk_regex_re = re.compile (r'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') |
461 | 462 | ||
462 | # this gets called an awful lot. | 463 | # this gets called an awful lot. |
463 | def is_relevant_line(linetokens): | 464 | def is_relevant_line(linetokens): |
@@ -557,6 +558,21 @@ def _parse_monitor_disk_log(file): | |||
557 | 558 | ||
558 | return disk_stats | 559 | return disk_stats |
559 | 560 | ||
561 | |||
562 | def _parse_reduced_net_log(file): | ||
563 | net_stats = {} | ||
564 | for time, lines in _parse_timed_blocks(file): | ||
565 | |||
566 | for line in lines: | ||
567 | parts = line.split() | ||
568 | iface = parts[0][:-1] | ||
569 | if iface not in net_stats: | ||
570 | net_stats[iface] = [NetSample(time, iface, int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4]))] | ||
571 | else: | ||
572 | net_stats[iface].append(NetSample(time, iface, int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4]))) | ||
573 | return net_stats | ||
574 | |||
575 | |||
560 | def _parse_pressure_logs(file, filename): | 576 | def _parse_pressure_logs(file, filename): |
561 | """ | 577 | """ |
562 | Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values | 578 | Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values |
@@ -594,8 +610,8 @@ def _parse_pressure_logs(file, filename): | |||
594 | # [ 0.039993] calling migration_init+0x0/0x6b @ 1 | 610 | # [ 0.039993] calling migration_init+0x0/0x6b @ 1 |
595 | # [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs | 611 | # [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs |
596 | def _parse_dmesg(writer, file): | 612 | def _parse_dmesg(writer, file): |
597 | timestamp_re = re.compile ("^\[\s*(\d+\.\d+)\s*]\s+(.*)$") | 613 | timestamp_re = re.compile (r"^\[\s*(\d+\.\d+)\s*]\s+(.*)$") |
598 | split_re = re.compile ("^(\S+)\s+([\S\+_-]+) (.*)$") | 614 | split_re = re.compile (r"^(\S+)\s+([\S\+_-]+) (.*)$") |
599 | processMap = {} | 615 | processMap = {} |
600 | idx = 0 | 616 | idx = 0 |
601 | inc = 1.0 / 1000000 | 617 | inc = 1.0 / 1000000 |
@@ -640,7 +656,7 @@ def _parse_dmesg(writer, file): | |||
640 | # print "foo: '%s' '%s' '%s'" % (type, func, rest) | 656 | # print "foo: '%s' '%s' '%s'" % (type, func, rest) |
641 | if type == "calling": | 657 | if type == "calling": |
642 | ppid = kernel.pid | 658 | ppid = kernel.pid |
643 | p = re.match ("\@ (\d+)", rest) | 659 | p = re.match (r"\@ (\d+)", rest) |
644 | if p is not None: | 660 | if p is not None: |
645 | ppid = float (p.group(1)) // 1000 | 661 | ppid = float (p.group(1)) // 1000 |
646 | # print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms) | 662 | # print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms) |
@@ -742,7 +758,7 @@ def get_num_cpus(headers): | |||
742 | cpu_model = headers.get("system.cpu") | 758 | cpu_model = headers.get("system.cpu") |
743 | if cpu_model is None: | 759 | if cpu_model is None: |
744 | return 1 | 760 | return 1 |
745 | mat = re.match(".*\\((\\d+)\\)", cpu_model) | 761 | mat = re.match(r".*\((\d+)\)", cpu_model) |
746 | if mat is None: | 762 | if mat is None: |
747 | return 1 | 763 | return 1 |
748 | return max (int(mat.group(1)), 1) | 764 | return max (int(mat.group(1)), 1) |
@@ -767,6 +783,8 @@ def _do_parse(writer, state, filename, file): | |||
767 | state.cmdline = _parse_cmdline_log(writer, file) | 783 | state.cmdline = _parse_cmdline_log(writer, file) |
768 | elif name == "monitor_disk.log": | 784 | elif name == "monitor_disk.log": |
769 | state.monitor_disk = _parse_monitor_disk_log(file) | 785 | state.monitor_disk = _parse_monitor_disk_log(file) |
786 | elif name == "reduced_proc_net.log": | ||
787 | state.net_stats = _parse_reduced_net_log(file) | ||
770 | #pressure logs are in a subdirectory | 788 | #pressure logs are in a subdirectory |
771 | elif name == "cpu.log": | 789 | elif name == "cpu.log": |
772 | state.cpu_pressure = _parse_pressure_logs(file, name) | 790 | state.cpu_pressure = _parse_pressure_logs(file, name) |
diff --git a/scripts/pybootchartgui/pybootchartgui/samples.py b/scripts/pybootchartgui/pybootchartgui/samples.py index a70d8a5a28..7c92d2ce6a 100644 --- a/scripts/pybootchartgui/pybootchartgui/samples.py +++ b/scripts/pybootchartgui/pybootchartgui/samples.py | |||
@@ -37,6 +37,16 @@ class CPUSample: | |||
37 | return str(self.time) + "\t" + str(self.user) + "\t" + \ | 37 | return str(self.time) + "\t" + str(self.user) + "\t" + \ |
38 | str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) | 38 | str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) |
39 | 39 | ||
40 | |||
41 | class NetSample: | ||
42 | def __init__(self, time, iface, received_bytes, transmitted_bytes, receive_diff, transmit_diff): | ||
43 | self.time = time | ||
44 | self.iface = iface | ||
45 | self.received_bytes = received_bytes | ||
46 | self.transmitted_bytes = transmitted_bytes | ||
47 | self.receive_diff = receive_diff | ||
48 | self.transmit_diff = transmit_diff | ||
49 | |||
40 | class CPUPressureSample: | 50 | class CPUPressureSample: |
41 | def __init__(self, time, avg10, avg60, avg300, deltaTotal): | 51 | def __init__(self, time, avg10, avg60, avg300, deltaTotal): |
42 | self.time = time | 52 | self.time = time |
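Each line in a reduced_proc_net.log timed block is expected to look roughly like "eth0: <rx-bytes> <tx-bytes> <rx-diff> <tx-diff>"; the sample line below is invented to match what the parser assumes, and the import path follows the package layout of these scripts:

    from pybootchartgui.samples import NetSample

    line = "eth0: 1048576 524288 4096 2048"   # invented sample data
    parts = line.split()
    sample = NetSample(100,                    # timestamp of the surrounding timed block
                       parts[0][:-1],          # "eth0:" -> "eth0"
                       int(parts[1]),          # received_bytes
                       int(parts[2]),          # transmitted_bytes
                       int(parts[3]),          # receive_diff
                       int(parts[4]))          # transmit_diff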
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py index 8a728720ba..9e01c09cb0 100755 --- a/scripts/relocate_sdk.py +++ b/scripts/relocate_sdk.py | |||
@@ -49,6 +49,34 @@ def get_arch(): | |||
49 | elif ei_class == 2: | 49 | elif ei_class == 2: |
50 | return 64 | 50 | return 64 |
51 | 51 | ||
52 | def get_dl_arch(dl_path): | ||
53 | try: | ||
54 | with open(dl_path, "r+b") as f: | ||
55 | e_ident =f.read(16) | ||
56 | except IOError: | ||
57 | exctype, ioex = sys.exc_info()[:2] | ||
58 | if ioex.errno == errno.ETXTBSY: | ||
59 | print("Could not open %s. File used by another process.\nPlease "\ | ||
60 | "make sure you exit all processes that might use any SDK "\ | ||
61 | "binaries." % e) | ||
62 | else: | ||
63 | print("Could not open %s: %s(%d)" % (e, ioex.strerror, ioex.errno)) | ||
64 | sys.exit(-1) | ||
65 | |||
66 | ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident) | ||
67 | |||
68 | if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0: | ||
69 | print("ERROR: unknow %s" % dl_path) | ||
70 | sys.exit(-1) | ||
71 | |||
72 | if ei_class == 1: | ||
73 | arch = 32 | ||
74 | elif ei_class == 2: | ||
75 | arch = 64 | ||
76 | |||
77 | return arch | ||
78 | |||
79 | |||
52 | def parse_elf_header(): | 80 | def parse_elf_header(): |
53 | global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ | 81 | global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ |
54 | e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx | 82 | e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx |
@@ -223,6 +251,8 @@ else: | |||
223 | 251 | ||
224 | executables_list = sys.argv[3:] | 252 | executables_list = sys.argv[3:] |
225 | 253 | ||
254 | dl_arch = get_dl_arch(new_dl_path) | ||
255 | |||
226 | errors = False | 256 | errors = False |
227 | for e in executables_list: | 257 | for e in executables_list: |
228 | perms = os.stat(e)[stat.ST_MODE] | 258 | perms = os.stat(e)[stat.ST_MODE] |
@@ -247,7 +277,7 @@ for e in executables_list: | |||
247 | old_size = os.path.getsize(e) | 277 | old_size = os.path.getsize(e) |
248 | if old_size >= 64: | 278 | if old_size >= 64: |
249 | arch = get_arch() | 279 | arch = get_arch() |
250 | if arch: | 280 | if arch and arch == dl_arch: |
251 | parse_elf_header() | 281 | parse_elf_header() |
252 | if not change_interpreter(e): | 282 | if not change_interpreter(e): |
253 | errors = True | 283 | errors = True |
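The new get_dl_arch() boils down to reading the 16 ELF identification bytes and mapping EI_CLASS to a word size; a condensed standalone sketch of the same check (the example path is hypothetical):

    import struct

    def elf_wordsize(path):
        with open(path, "rb") as f:
            e_ident = f.read(16)
        ei_mag0, ei_magic, ei_class = struct.unpack("<B3sB11x", e_ident)
        if ei_mag0 != 0x7f or ei_magic != b"ELF":
            raise ValueError("%s is not an ELF file" % path)
        return {1: 32, 2: 64}.get(ei_class)

    # e.g. elf_wordsize("/lib64/ld-linux-x86-64.so.2") -> 64; relocate_sdk can then skip
    # binaries whose word size does not match the new dynamic loader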
diff --git a/scripts/resulttool b/scripts/resulttool index fc282bda6c..66a6af9959 100755 --- a/scripts/resulttool +++ b/scripts/resulttool | |||
@@ -15,6 +15,9 @@ | |||
15 | # To report test report, execute the below | 15 | # To report test report, execute the below |
16 | # $ resulttool report <source_dir> | 16 | # $ resulttool report <source_dir> |
17 | # | 17 | # |
18 | # To create a unit test report in JUnit XML format, execute the below | ||
19 | # $ resulttool junit <json_file> | ||
20 | # | ||
18 | # To perform regression file analysis, execute the below | 21 | # To perform regression file analysis, execute the below |
19 | # $ resulttool regression-file <base_result_file> <target_result_file> | 22 | # $ resulttool regression-file <base_result_file> <target_result_file> |
20 | # | 23 | # |
@@ -43,6 +46,7 @@ import resulttool.regression | |||
43 | import resulttool.report | 46 | import resulttool.report |
44 | import resulttool.manualexecution | 47 | import resulttool.manualexecution |
45 | import resulttool.log | 48 | import resulttool.log |
49 | import resulttool.junit | ||
46 | logger = scriptutils.logger_create('resulttool') | 50 | logger = scriptutils.logger_create('resulttool') |
47 | 51 | ||
48 | def main(): | 52 | def main(): |
@@ -61,6 +65,7 @@ def main(): | |||
61 | resulttool.regression.register_commands(subparsers) | 65 | resulttool.regression.register_commands(subparsers) |
62 | resulttool.report.register_commands(subparsers) | 66 | resulttool.report.register_commands(subparsers) |
63 | resulttool.log.register_commands(subparsers) | 67 | resulttool.log.register_commands(subparsers) |
68 | resulttool.junit.register_commands(subparsers) | ||
64 | 69 | ||
65 | args = parser.parse_args() | 70 | args = parser.parse_args() |
66 | if args.debug: | 71 | if args.debug: |
diff --git a/scripts/runqemu b/scripts/runqemu index 69cd44864e..3d77046972 100755 --- a/scripts/runqemu +++ b/scripts/runqemu | |||
@@ -468,9 +468,11 @@ class BaseConfig(object): | |||
468 | self.set("IMAGE_LINK_NAME", image_link_name) | 468 | self.set("IMAGE_LINK_NAME", image_link_name) |
469 | logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name) | 469 | logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name) |
470 | 470 | ||
471 | def set_dri_path(self): | 471 | def set_mesa_paths(self): |
472 | drivers_path = os.path.join(self.bindir_native, '../lib/dri') | 472 | drivers_path = os.path.join(self.bindir_native, '../lib/dri') |
473 | if not os.path.exists(drivers_path) or not os.listdir(drivers_path): | 473 | gbm_path = os.path.join(self.bindir_native, '../lib/gbm') |
474 | if not os.path.exists(drivers_path) or not os.listdir(drivers_path) \ | ||
475 | or not os.path.exists(gbm_path) or not os.listdir(gbm_path): | ||
474 | raise RunQemuError(""" | 476 | raise RunQemuError(""" |
475 | qemu has been built without opengl support and accelerated graphics support is not available. | 477 | qemu has been built without opengl support and accelerated graphics support is not available. |
476 | To enable it, add: | 478 | To enable it, add: |
@@ -479,6 +481,7 @@ DISTRO_FEATURES_NATIVESDK:append = " opengl" | |||
479 | to your build configuration. | 481 | to your build configuration. |
480 | """) | 482 | """) |
481 | self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path | 483 | self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path |
484 | self.qemu_environ['GBM_BACKENDS_PATH'] = gbm_path | ||
482 | 485 | ||
483 | def check_args(self): | 486 | def check_args(self): |
484 | for debug in ("-d", "--debug"): | 487 | for debug in ("-d", "--debug"): |
@@ -1192,19 +1195,22 @@ to your build configuration. | |||
1192 | raise RunQemuError("a new one with sudo.") | 1195 | raise RunQemuError("a new one with sudo.") |
1193 | 1196 | ||
1194 | gid = os.getgid() | 1197 | gid = os.getgid() |
1195 | uid = os.getuid() | ||
1196 | logger.info("Setting up tap interface under sudo") | 1198 | logger.info("Setting up tap interface under sudo") |
1197 | cmd = ('sudo', self.qemuifup, str(gid)) | 1199 | cmd = ('sudo', self.qemuifup, str(gid)) |
1198 | try: | 1200 | for _ in range(5): |
1199 | tap = subprocess.check_output(cmd).decode('utf-8').strip() | 1201 | try: |
1200 | except subprocess.CalledProcessError as e: | 1202 | tap = subprocess.check_output(cmd).decode('utf-8').strip() |
1201 | logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e)) | 1203 | except subprocess.CalledProcessError as e: |
1202 | sys.exit(1) | 1204 | logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e)) |
1203 | lockfile = os.path.join(lockdir, tap) | 1205 | sys.exit(1) |
1204 | self.taplock = lockfile + '.lock' | 1206 | lockfile = os.path.join(lockdir, tap) |
1205 | self.acquire_taplock() | 1207 | self.taplock = lockfile + '.lock' |
1206 | self.cleantap = True | 1208 | if self.acquire_taplock(): |
1207 | logger.debug('Created tap: %s' % tap) | 1209 | self.cleantap = True |
1210 | logger.debug('Created tap: %s' % tap) | ||
1211 | break | ||
1212 | else: | ||
1213 | tap = None | ||
1208 | 1214 | ||
1209 | if not tap: | 1215 | if not tap: |
1210 | logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") | 1216 | logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") |
@@ -1295,6 +1301,10 @@ to your build configuration. | |||
1295 | elif drive_type.startswith("/dev/hd"): | 1301 | elif drive_type.startswith("/dev/hd"): |
1296 | logger.info('Using ide drive') | 1302 | logger.info('Using ide drive') |
1297 | vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format) | 1303 | vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format) |
1304 | elif drive_type.startswith("/dev/mmcblk"): | ||
1305 | logger.info('Using sdcard drive') | ||
1306 | vm_drive = '-drive id=sdcard0,if=none,file=%s,format=%s -device sdhci-pci -device sd-card,drive=sdcard0' \ | ||
1307 | % (self.rootfs, rootfs_format) | ||
1298 | elif drive_type.startswith("/dev/vdb"): | 1308 | elif drive_type.startswith("/dev/vdb"): |
1299 | logger.info('Using block virtio drive'); | 1309 | logger.info('Using block virtio drive'); |
1300 | vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \ | 1310 | vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \ |
@@ -1454,7 +1464,7 @@ to your build configuration. | |||
1454 | self.qemu_opt += ' -display ' | 1464 | self.qemu_opt += ' -display ' |
1455 | if self.egl_headless == True: | 1465 | if self.egl_headless == True: |
1456 | self.check_render_nodes() | 1466 | self.check_render_nodes() |
1457 | self.set_dri_path() | 1467 | self.set_mesa_paths() |
1458 | self.qemu_opt += 'egl-headless,' | 1468 | self.qemu_opt += 'egl-headless,' |
1459 | else: | 1469 | else: |
1460 | if self.sdl == True: | 1470 | if self.sdl == True: |
@@ -1464,10 +1474,10 @@ to your build configuration. | |||
1464 | self.qemu_opt += 'gtk,' | 1474 | self.qemu_opt += 'gtk,' |
1465 | 1475 | ||
1466 | if self.gl == True: | 1476 | if self.gl == True: |
1467 | self.set_dri_path() | 1477 | self.set_mesa_paths() |
1468 | self.qemu_opt += 'gl=on,' | 1478 | self.qemu_opt += 'gl=on,' |
1469 | elif self.gl_es == True: | 1479 | elif self.gl_es == True: |
1470 | self.set_dri_path() | 1480 | self.set_mesa_paths() |
1471 | self.qemu_opt += 'gl=es,' | 1481 | self.qemu_opt += 'gl=es,' |
1472 | self.qemu_opt += 'show-cursor=on' | 1482 | self.qemu_opt += 'show-cursor=on' |
1473 | 1483 | ||
@@ -1483,7 +1493,7 @@ to your build configuration. | |||
1483 | # If no serial or serialtcp options were specified, only ttyS0 is created | 1493 | # If no serial or serialtcp options were specified, only ttyS0 is created |
1484 | # and sysvinit shows an error trying to enable ttyS1: | 1494 | # and sysvinit shows an error trying to enable ttyS1: |
1485 | # INIT: Id "S1" respawning too fast: disabled for 5 minutes | 1495 | # INIT: Id "S1" respawning too fast: disabled for 5 minutes |
1486 | serial_num = len(re.findall("-serial", self.qemu_opt)) | 1496 | serial_num = len(re.findall("(^| )-serial ", self.qemu_opt)) |
1487 | 1497 | ||
1488 | # Assume if the user passed serial options, they know what they want | 1498 | # Assume if the user passed serial options, they know what they want |
1489 | # and pad to two devices | 1499 | # and pad to two devices |
@@ -1503,7 +1513,7 @@ to your build configuration. | |||
1503 | 1513 | ||
1504 | self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") | 1514 | self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") |
1505 | 1515 | ||
1506 | serial_num = len(re.findall("-serial", self.qemu_opt)) | 1516 | serial_num = len(re.findall("(^| )-serial ", self.qemu_opt)) |
1507 | if serial_num < 2: | 1517 | if serial_num < 2: |
1508 | self.qemu_opt += " -serial null" | 1518 | self.qemu_opt += " -serial null" |
1509 | 1519 | ||
@@ -1669,6 +1679,9 @@ to your build configuration. | |||
1669 | if multiconfig: | 1679 | if multiconfig: |
1670 | multiconfig = "mc:%s" % multiconfig | 1680 | multiconfig = "mc:%s" % multiconfig |
1671 | 1681 | ||
1682 | if self.rootfs and not target: | ||
1683 | target = self.rootfs | ||
1684 | |||
1672 | if mach: | 1685 | if mach: |
1673 | cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) | 1686 | cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) |
1674 | else: | 1687 | else: |
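The reworked tap setup retries because another runqemu instance may grab the freshly created interface first; the pattern is essentially "create, try to lock, repeat". A generic sketch of that pattern (create_tap and try_lock are hypothetical stand-ins for the sudo helper and the lockfile code):

    def acquire_tap(create_tap, try_lock, attempts=5):
        # try up to 'attempts' candidate interfaces until one can be locked exclusively
        for _ in range(attempts):
            tap = create_tap()      # e.g. invoke the qemu-ifup helper under sudo
            if try_lock(tap):       # e.g. take <lockdir>/<tap>.lock
                return tap          # this tap belongs to us; caller removes it on exit
        return None                 # give up; runqemu reports the failure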
diff --git a/scripts/send-error-report b/scripts/send-error-report index cfbcaa52cb..cc1bc7c2b1 100755 --- a/scripts/send-error-report +++ b/scripts/send-error-report | |||
@@ -6,6 +6,7 @@ | |||
6 | # Copyright (C) 2013 Intel Corporation | 6 | # Copyright (C) 2013 Intel Corporation |
7 | # Author: Andreea Proca <andreea.b.proca@intel.com> | 7 | # Author: Andreea Proca <andreea.b.proca@intel.com> |
8 | # Author: Michael Wood <michael.g.wood@intel.com> | 8 | # Author: Michael Wood <michael.g.wood@intel.com> |
9 | # Author: Thomas Perrot <thomas.perrot@bootlin.com> | ||
9 | # | 10 | # |
10 | # SPDX-License-Identifier: GPL-2.0-only | 11 | # SPDX-License-Identifier: GPL-2.0-only |
11 | # | 12 | # |
@@ -22,7 +23,7 @@ scripts_lib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'li | |||
22 | sys.path.insert(0, scripts_lib_path) | 23 | sys.path.insert(0, scripts_lib_path) |
23 | import argparse_oe | 24 | import argparse_oe |
24 | 25 | ||
25 | version = "0.3" | 26 | version = "0.4" |
26 | 27 | ||
27 | log = logging.getLogger("send-error-report") | 28 | log = logging.getLogger("send-error-report") |
28 | logging.basicConfig(format='%(levelname)s: %(message)s') | 29 | logging.basicConfig(format='%(levelname)s: %(message)s') |
@@ -65,7 +66,7 @@ def edit_content(json_file_path): | |||
65 | 66 | ||
66 | def prepare_data(args): | 67 | def prepare_data(args): |
67 | # attempt to get the max_log_size from the server's settings | 68 | # attempt to get the max_log_size from the server's settings |
68 | max_log_size = getPayloadLimit(args.protocol+args.server+"/ClientPost/JSON") | 69 | max_log_size = getPayloadLimit(args.server+"/ClientPost/JSON") |
69 | 70 | ||
70 | if not os.path.isfile(args.error_file): | 71 | if not os.path.isfile(args.error_file): |
71 | log.error("No data file found.") | 72 | log.error("No data file found.") |
@@ -135,19 +136,38 @@ def send_data(data, args): | |||
135 | headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version} | 136 | headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version} |
136 | 137 | ||
137 | if args.json: | 138 | if args.json: |
138 | url = args.protocol+args.server+"/ClientPost/JSON/" | 139 | url = args.server+"/ClientPost/JSON/" |
139 | else: | 140 | else: |
140 | url = args.protocol+args.server+"/ClientPost/" | 141 | url = args.server+"/ClientPost/" |
141 | 142 | ||
142 | req = urllib.request.Request(url, data=data, headers=headers) | 143 | req = urllib.request.Request(url, data=data, headers=headers) |
144 | |||
145 | log.debug(f"Request URL: {url}") | ||
146 | log.debug(f"Request Headers: {headers}") | ||
147 | log.debug(f"Request Data: {data.decode('utf-8')}") | ||
148 | |||
143 | try: | 149 | try: |
144 | response = urllib.request.urlopen(req) | 150 | response = urllib.request.urlopen(req) |
145 | except urllib.error.HTTPError as e: | 151 | except urllib.error.HTTPError as e: |
146 | logging.error(str(e)) | 152 | log.error(f"HTTP Error {e.code}: {e.reason}") |
153 | log.debug(f"Response Content: {e.read().decode('utf-8')}") | ||
147 | sys.exit(1) | 154 | sys.exit(1) |
148 | 155 | ||
156 | log.debug(f"Response Status: {response.status}") | ||
157 | log.debug(f"Response Headers: {response.getheaders()}") | ||
149 | print(response.read().decode('utf-8')) | 158 | print(response.read().decode('utf-8')) |
150 | 159 | ||
160 | def validate_server_url(args): | ||
161 | # Get the error report server from an argument | ||
162 | server = args.server or 'https://errors.yoctoproject.org' | ||
163 | |||
164 | if not server.startswith('http://') and not server.startswith('https://'): | ||
165 | log.error("Missing a URL scheme either http:// or https:// in the server name: " + server) | ||
166 | sys.exit(1) | ||
167 | |||
168 | # Construct the final URL | ||
169 | return f"{server}" | ||
170 | |||
151 | 171 | ||
152 | if __name__ == '__main__': | 172 | if __name__ == '__main__': |
153 | arg_parse = argparse_oe.ArgumentParser(description="This scripts will send an error report to your specified error-report-web server.") | 173 | arg_parse = argparse_oe.ArgumentParser(description="This scripts will send an error report to your specified error-report-web server.") |
@@ -164,8 +184,7 @@ if __name__ == '__main__': | |||
164 | arg_parse.add_argument("-s", | 184 | arg_parse.add_argument("-s", |
165 | "--server", | 185 | "--server", |
166 | help="Server to send error report to", | 186 | help="Server to send error report to", |
167 | type=str, | 187 | type=str) |
168 | default="errors.yoctoproject.org") | ||
169 | 188 | ||
170 | arg_parse.add_argument("-e", | 189 | arg_parse.add_argument("-e", |
171 | "--email", | 190 | "--email", |
@@ -190,18 +209,22 @@ if __name__ == '__main__': | |||
190 | help="Return the result in json format, silences all other output", | 209 | help="Return the result in json format, silences all other output", |
191 | action="store_true") | 210 | action="store_true") |
192 | 211 | ||
193 | arg_parse.add_argument("--no-ssl", | 212 | arg_parse.add_argument("-d", |
194 | help="Use http instead of https protocol", | 213 | "--debug", |
195 | dest="protocol", | 214 | help="Enable debug mode to print request/response details", |
196 | action="store_const", const="http://", default="https://") | 215 | action="store_true") |
197 | |||
198 | |||
199 | 216 | ||
200 | args = arg_parse.parse_args() | 217 | args = arg_parse.parse_args() |
201 | 218 | ||
219 | args.server = validate_server_url(args) | ||
220 | |||
202 | if (args.json == False): | 221 | if (args.json == False): |
203 | print("Preparing to send errors to: "+args.server) | 222 | print("Preparing to send errors to: "+args.server) |
204 | 223 | ||
224 | # Enable debugging if requested | ||
225 | if args.debug: | ||
226 | log.setLevel(logging.DEBUG) | ||
227 | |||
205 | data = prepare_data(args) | 228 | data = prepare_data(args) |
206 | send_data(data, args) | 229 | send_data(data, args) |
207 | 230 | ||
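Two behaviour changes are visible above: --server now takes a full URL including its scheme (the old --no-ssl/protocol handling is gone, with https://errors.yoctoproject.org as the default), and -d/--debug switches the logger to DEBUG so the request and response details are printed. A small sketch of the new scheme check, mirroring validate_server_url(); check_server and the example host are made up for illustration:

    import logging
    import sys

    log = logging.getLogger("send-error-report")
    logging.basicConfig(format='%(levelname)s: %(message)s')

    def check_server(server):
        # Same rule as validate_server_url(): bare host names are rejected.
        server = server or 'https://errors.yoctoproject.org'
        if not server.startswith('http://') and not server.startswith('https://'):
            log.error("Missing a URL scheme either http:// or https:// in the server name: " + server)
            sys.exit(1)
        return server

    print(check_server(None))                          # falls back to the default server
    print(check_server("https://errors.example.com"))  # accepted as-is
    # check_server("errors.example.com") would log the error and exit(1)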
diff --git a/scripts/sstate-cache-management.py b/scripts/sstate-cache-management.py index d3f600bd28..303b8f13a3 100755 --- a/scripts/sstate-cache-management.py +++ b/scripts/sstate-cache-management.py | |||
@@ -268,6 +268,10 @@ def parse_arguments(): | |||
268 | # ) | 268 | # ) |
269 | 269 | ||
270 | parser.add_argument( | 270 | parser.add_argument( |
271 | "-n", "--dry-run", action="store_true", help="Don't execute, just go through the motions." | ||
272 | ) | ||
273 | |||
274 | parser.add_argument( | ||
271 | "-y", | 275 | "-y", |
272 | "--yes", | 276 | "--yes", |
273 | action="store_true", | 277 | action="store_true", |
@@ -314,6 +318,9 @@ def main(): | |||
314 | if args.debug >= 1: | 318 | if args.debug >= 1: |
315 | print("\n".join([str(p.path) for p in remove])) | 319 | print("\n".join([str(p.path) for p in remove])) |
316 | print(f"{len(remove)} out of {len(paths)} files will be removed!") | 320 | print(f"{len(remove)} out of {len(paths)} files will be removed!") |
321 | if args.dry_run: | ||
322 | return 0 | ||
323 | |||
317 | if not args.yes: | 324 | if not args.yes: |
318 | print("Do you want to continue (y/n)?") | 325 | print("Do you want to continue (y/n)?") |
319 | confirm = input() in ("y", "Y") | 326 | confirm = input() in ("y", "Y") |
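The new -n/--dry-run option returns right after the removal summary, before the interactive confirmation and the actual deletion. A simplified, self-contained sketch of that control flow (maybe_remove is an illustrative stand-in, not a function from the script):

    def maybe_remove(paths, remove, dry_run=False, yes=False):
        # Report what would happen, like the summary printed above.
        print(f"{len(remove)} out of {len(paths)} files will be removed!")
        if dry_run:
            return 0            # --dry-run: stop here, nothing is touched
        if not yes:
            print("Do you want to continue (y/n)?")
            if input() not in ("y", "Y"):
                return 1
        # ... the real script removes the selected sstate files here ...
        return 0

    # maybe_remove(["a.tar.zst", "b.tar.zst"], ["a.tar.zst"], dry_run=True) only prints the summary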
diff --git a/scripts/test-remote-image b/scripts/test-remote-image index d209d22854..1d018992b0 100755 --- a/scripts/test-remote-image +++ b/scripts/test-remote-image | |||
@@ -152,8 +152,7 @@ class AutoTargetProfile(BaseTargetProfile): | |||
152 | return controller | 152 | return controller |
153 | 153 | ||
154 | def set_kernel_file(self): | 154 | def set_kernel_file(self): |
155 | postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" | 155 | machine = get_bb_var('MACHINE') |
156 | machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig) | ||
157 | self.kernel_file = self.kernel_type + '-' + machine + '.bin' | 156 | self.kernel_file = self.kernel_type + '-' + machine + '.bin' |
158 | 157 | ||
159 | def set_rootfs_file(self): | 158 | def set_rootfs_file(self): |
@@ -215,13 +214,11 @@ class PublicAB(BaseRepoProfile): | |||
215 | def get_repo_path(self): | 214 | def get_repo_path(self): |
216 | path = '/machines/' | 215 | path = '/machines/' |
217 | 216 | ||
218 | postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" | 217 | machine = get_bb_var('MACHINE') |
219 | machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig) | ||
220 | if 'qemu' in machine: | 218 | if 'qemu' in machine: |
221 | path += 'qemu/' | 219 | path += 'qemu/' |
222 | 220 | ||
223 | postconfig = "QA_GET_DISTRO = \"${DISTRO}\"" | 221 | distro = get_bb_var('DISTRO') |
224 | distro = get_bb_var('QA_GET_DISTRO', postconfig=postconfig) | ||
225 | path += distro.replace('poky', machine) + '/' | 222 | path += distro.replace('poky', machine) + '/' |
226 | return path | 223 | return path |
227 | 224 | ||
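Both call sites above drop the QA_GET_* postconfig indirection and read MACHINE and DISTRO directly. A short sketch of the simplified lookups; get_bb_var is assumed to be the oeqa helper the script imports, and the values in the comments are examples only:

    from oeqa.utils.commands import get_bb_var

    machine = get_bb_var('MACHINE')   # e.g. "qemux86-64"
    distro = get_bb_var('DISTRO')     # e.g. "poky"

    # As in set_kernel_file(), assuming a bzImage kernel type for illustration
    kernel_file = "bzImage" + "-" + machine + ".bin"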
diff --git a/scripts/wic b/scripts/wic index 06e0b48db0..9137208f5e 100755 --- a/scripts/wic +++ b/scripts/wic | |||
@@ -237,6 +237,13 @@ def wic_ls_subcommand(args, usage_str): | |||
237 | Command-line handling for list content of images. | 237 | Command-line handling for list content of images. |
238 | The real work is done by engine.wic_ls() | 238 | The real work is done by engine.wic_ls() |
239 | """ | 239 | """ |
240 | |||
241 | if args.image_name: | ||
242 | BB_VARS.default_image = args.image_name | ||
243 | |||
244 | if args.vars_dir: | ||
245 | BB_VARS.vars_dir = args.vars_dir | ||
246 | |||
240 | engine.wic_ls(args, args.native_sysroot) | 247 | engine.wic_ls(args, args.native_sysroot) |
241 | 248 | ||
242 | def wic_cp_subcommand(args, usage_str): | 249 | def wic_cp_subcommand(args, usage_str): |
@@ -244,6 +251,12 @@ def wic_cp_subcommand(args, usage_str): | |||
244 | Command-line handling for copying files/dirs to images. | 251 | Command-line handling for copying files/dirs to images. |
245 | The real work is done by engine.wic_cp() | 252 | The real work is done by engine.wic_cp() |
246 | """ | 253 | """ |
254 | if args.image_name: | ||
255 | BB_VARS.default_image = args.image_name | ||
256 | |||
257 | if args.vars_dir: | ||
258 | BB_VARS.vars_dir = args.vars_dir | ||
259 | |||
247 | engine.wic_cp(args, args.native_sysroot) | 260 | engine.wic_cp(args, args.native_sysroot) |
248 | 261 | ||
249 | def wic_rm_subcommand(args, usage_str): | 262 | def wic_rm_subcommand(args, usage_str): |
@@ -251,6 +264,12 @@ def wic_rm_subcommand(args, usage_str): | |||
251 | Command-line handling for removing files/dirs from images. | 264 | Command-line handling for removing files/dirs from images. |
252 | The real work is done by engine.wic_rm() | 265 | The real work is done by engine.wic_rm() |
253 | """ | 266 | """ |
267 | if args.image_name: | ||
268 | BB_VARS.default_image = args.image_name | ||
269 | |||
270 | if args.vars_dir: | ||
271 | BB_VARS.vars_dir = args.vars_dir | ||
272 | |||
254 | engine.wic_rm(args, args.native_sysroot) | 273 | engine.wic_rm(args, args.native_sysroot) |
255 | 274 | ||
256 | def wic_write_subcommand(args, usage_str): | 275 | def wic_write_subcommand(args, usage_str): |
@@ -258,6 +277,12 @@ def wic_write_subcommand(args, usage_str): | |||
258 | Command-line handling for writing images. | 277 | Command-line handling for writing images. |
259 | The real work is done by engine.wic_write() | 278 | The real work is done by engine.wic_write() |
260 | """ | 279 | """ |
280 | if args.image_name: | ||
281 | BB_VARS.default_image = args.image_name | ||
282 | |||
283 | if args.vars_dir: | ||
284 | BB_VARS.vars_dir = args.vars_dir | ||
285 | |||
261 | engine.wic_write(args, args.native_sysroot) | 286 | engine.wic_write(args, args.native_sysroot) |
262 | 287 | ||
263 | def wic_help_subcommand(args, usage_str): | 288 | def wic_help_subcommand(args, usage_str): |
@@ -390,6 +415,12 @@ def wic_init_parser_ls(subparser): | |||
390 | help="image spec: <image>[:<vfat partition>[<path>]]") | 415 | help="image spec: <image>[:<vfat partition>[<path>]]") |
391 | subparser.add_argument("-n", "--native-sysroot", | 416 | subparser.add_argument("-n", "--native-sysroot", |
392 | help="path to the native sysroot containing the tools") | 417 | help="path to the native sysroot containing the tools") |
418 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
419 | help="name of the image to use the artifacts from " | ||
420 | "e.g. core-image-sato") | ||
421 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
422 | help="directory with <image>.env files that store " | ||
423 | "bitbake variables") | ||
393 | 424 | ||
394 | def imgpathtype(arg): | 425 | def imgpathtype(arg): |
395 | img = imgtype(arg) | 426 | img = imgtype(arg) |
@@ -404,6 +435,12 @@ def wic_init_parser_cp(subparser): | |||
404 | help="image spec: <image>:<vfat partition>[<path>] or <file>") | 435 | help="image spec: <image>:<vfat partition>[<path>] or <file>") |
405 | subparser.add_argument("-n", "--native-sysroot", | 436 | subparser.add_argument("-n", "--native-sysroot", |
406 | help="path to the native sysroot containing the tools") | 437 | help="path to the native sysroot containing the tools") |
438 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
439 | help="name of the image to use the artifacts from " | ||
440 | "e.g. core-image-sato") | ||
441 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
442 | help="directory with <image>.env files that store " | ||
443 | "bitbake variables") | ||
407 | 444 | ||
408 | def wic_init_parser_rm(subparser): | 445 | def wic_init_parser_rm(subparser): |
409 | subparser.add_argument("path", type=imgpathtype, | 446 | subparser.add_argument("path", type=imgpathtype, |
@@ -413,6 +450,12 @@ def wic_init_parser_rm(subparser): | |||
413 | subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, | 450 | subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, |
414 | help="remove directories and their contents recursively, " | 451 | help="remove directories and their contents recursively, " |
415 | " this only applies to ext* partition") | 452 | " this only applies to ext* partition") |
453 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
454 | help="name of the image to use the artifacts from " | ||
455 | "e.g. core-image-sato") | ||
456 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
457 | help="directory with <image>.env files that store " | ||
458 | "bitbake variables") | ||
416 | 459 | ||
417 | def expandtype(rules): | 460 | def expandtype(rules): |
418 | """ | 461 | """ |
@@ -454,6 +497,12 @@ def wic_init_parser_write(subparser): | |||
454 | help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") | 497 | help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") |
455 | subparser.add_argument("-n", "--native-sysroot", | 498 | subparser.add_argument("-n", "--native-sysroot", |
456 | help="path to the native sysroot containing the tools") | 499 | help="path to the native sysroot containing the tools") |
500 | subparser.add_argument("--image-name", dest="image_name", | ||
501 | help="name of the image to use the artifacts from " | ||
502 | "e.g. core-image-sato") | ||
503 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
504 | help="directory with <image>.env files that store " | ||
505 | "bitbake variables") | ||
457 | 506 | ||
458 | def wic_init_parser_help(subparser): | 507 | def wic_init_parser_help(subparser): |
459 | helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) | 508 | helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) |
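Each subcommand above (ls, cp, rm and write) gains the same pair of options: -e/--image-name (plain --image-name for write) selects which image's artifacts to use, and -v/--vars points at a directory of <image>.env files; both are routed into the shared BB_VARS lookup before the engine runs. A self-contained sketch of how one subparser wires this up, with a stand-in for BB_VARS and made-up image and path names:

    import argparse

    class Vars:
        # Stand-in for wic's real BB_VARS cache, only to show where the options land.
        default_image = None
        vars_dir = None

    BB_VARS = Vars()

    parser = argparse.ArgumentParser(prog="wic ls")
    parser.add_argument("path", help="image spec: <image>[:<vfat partition>[<path>]]")
    parser.add_argument("-e", "--image-name", dest="image_name",
                        help="name of the image to use the artifacts from "
                             "e.g. core-image-sato")
    parser.add_argument("-v", "--vars", dest="vars_dir",
                        help="directory with <image>.env files that store "
                             "bitbake variables")

    # Hypothetical command line: wic ls ./image.wic:1 -e core-image-sato -v ./imgdata
    args = parser.parse_args(["./image.wic:1", "-e", "core-image-sato", "-v", "./imgdata"])
    if args.image_name:
        BB_VARS.default_image = args.image_name
    if args.vars_dir:
        BB_VARS.vars_dir = args.vars_dir
    print(BB_VARS.default_image, BB_VARS.vars_dir)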