Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/b4-wrapper-poky.py | 185
-rwxr-xr-x  scripts/bitbake-prserv-tool | 40
-rwxr-xr-x  scripts/buildstats-diff | 26
-rwxr-xr-x  scripts/buildstats-summary | 16
-rwxr-xr-x  scripts/clean-hashserver-database | 77
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py | 2
-rwxr-xr-x  scripts/contrib/improve_kernel_cve_report.py | 467
-rwxr-xr-x  scripts/contrib/make-spdx-bindings.sh | 12
-rw-r--r--  scripts/contrib/oe-image-files-spdx/.gitignore | 8
-rw-r--r--  scripts/contrib/oe-image-files-spdx/README.md | 24
-rw-r--r--  scripts/contrib/oe-image-files-spdx/pyproject.toml | 23
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py | 1
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py | 86
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py | 1
-rwxr-xr-x  scripts/contrib/patchreview.py | 11
-rwxr-xr-x  scripts/cve-json-to-text.py | 146
-rwxr-xr-x  scripts/devtool | 88
-rwxr-xr-x  scripts/gen-site-config | 43
-rwxr-xr-x  scripts/install-buildtools | 65
-rw-r--r--  scripts/lib/build_perf/html/measurement_chart.html | 128
-rw-r--r--  scripts/lib/build_perf/html/report.html | 71
-rw-r--r--  scripts/lib/checklayer/__init__.py | 12
-rw-r--r--  scripts/lib/checklayer/cases/common.py | 35
-rw-r--r--  scripts/lib/devtool/__init__.py | 4
-rw-r--r--  scripts/lib/devtool/build.py | 2
-rw-r--r--  scripts/lib/devtool/build_sdk.py | 7
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py | 1
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py | 107
-rw-r--r--  scripts/lib/devtool/menuconfig.py | 7
-rw-r--r--  scripts/lib/devtool/standard.py | 93
-rw-r--r--  scripts/lib/devtool/upgrade.py | 68
-rw-r--r--  scripts/lib/devtool/utilcmds.py | 2
-rw-r--r--  scripts/lib/recipetool/append.py | 8
-rw-r--r--  scripts/lib/recipetool/create.py | 253
-rw-r--r--  scripts/lib/recipetool/create_go.py | 677
-rw-r--r--  scripts/lib/recipetool/create_npm.py | 96
-rw-r--r--  scripts/lib/recipetool/licenses.csv | 37
-rw-r--r--  scripts/lib/resulttool/junit.py | 77
-rwxr-xr-x  scripts/lib/resulttool/manualexecution.py | 2
-rw-r--r--  scripts/lib/resulttool/regression.py | 3
-rw-r--r--  scripts/lib/resulttool/report.py | 2
-rw-r--r--  scripts/lib/resulttool/resultutils.py | 76
-rw-r--r--  scripts/lib/resulttool/store.py | 27
-rw-r--r--  scripts/lib/scriptutils.py | 20
-rw-r--r--  scripts/lib/wic/canned-wks/common.wks.inc | 2
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-gpt.wks | 2
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | 2
-rw-r--r--  scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | 2
-rw-r--r--  scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | 3
-rw-r--r--  scripts/lib/wic/canned-wks/mkefidisk.wks | 4
-rw-r--r--  scripts/lib/wic/canned-wks/mkhybridiso.wks | 2
-rw-r--r--  scripts/lib/wic/canned-wks/sdimage-bootpart.wks | 2
-rw-r--r--  scripts/lib/wic/canned-wks/systemd-bootdisk.wks | 2
-rw-r--r--  scripts/lib/wic/engine.py | 22
-rw-r--r--  scripts/lib/wic/help.py | 42
-rw-r--r--  scripts/lib/wic/partition.py | 52
-rw-r--r--  scripts/lib/wic/pluginbase.py | 2
-rw-r--r--  scripts/lib/wic/plugins/imager/direct.py | 58
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg_biosplusefi.py (renamed from scripts/lib/wic/plugins/source/bootimg-biosplusefi.py) | 30
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg_efi.py (renamed from scripts/lib/wic/plugins/source/bootimg-efi.py) | 208
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg_partition.py (renamed from scripts/lib/wic/plugins/source/bootimg-partition.py) | 43
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg_pcbios.py (renamed from scripts/lib/wic/plugins/source/bootimg-pcbios.py) | 4
-rw-r--r--  scripts/lib/wic/plugins/source/isoimage_isohybrid.py (renamed from scripts/lib/wic/plugins/source/isoimage-isohybrid.py) | 10
-rw-r--r--  scripts/lib/wic/plugins/source/rootfs.py | 2
-rwxr-xr-x  scripts/lz4c | 26
-rwxr-xr-x  scripts/makefile-getvar | 24
-rwxr-xr-x  scripts/oe-build-perf-report | 8
-rwxr-xr-x  scripts/oe-selftest | 2
-rwxr-xr-x  scripts/oe-setup-build | 10
-rwxr-xr-x  scripts/patchtest | 74
-rw-r--r--  scripts/patchtest.README | 62
-rwxr-xr-x  scripts/pull-sdpx-licenses.py | 101
-rw-r--r--  scripts/pybootchartgui/pybootchartgui/draw.py | 48
-rw-r--r--  scripts/pybootchartgui/pybootchartgui/parsing.py | 28
-rw-r--r--  scripts/pybootchartgui/pybootchartgui/samples.py | 10
-rwxr-xr-x  scripts/relocate_sdk.py | 32
-rwxr-xr-x  scripts/resulttool | 5
-rwxr-xr-x  scripts/runqemu | 49
-rwxr-xr-x  scripts/send-error-report | 49
-rwxr-xr-x  scripts/sstate-cache-management.py | 7
-rwxr-xr-x  scripts/test-remote-image | 9
-rwxr-xr-x  scripts/wic | 49
82 files changed, 2484 insertions, 1739 deletions
diff --git a/scripts/b4-wrapper-poky.py b/scripts/b4-wrapper-poky.py
new file mode 100755
index 0000000000..f1170db06b
--- /dev/null
+++ b/scripts/b4-wrapper-poky.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+# This script is to be called by b4:
+# - through b4.prep-perpatch-check-cmd with "prep-perpatch-check-cmd" as
+#   first argument,
+# - through b4.send-auto-cc-cmd with "send-auto-cc-cmd" as first argument,
+# - through b4.send-auto-to-cmd with "send-auto-to-cmd" as first argument,
+#
+# When prep-perpatch-check-cmd is passed:
+#
+# This checks that a patch makes changes to at most one project in the poky
+# combo repo (that is, out of yocto-docs, bitbake, openembedded-core combined
+# into poky and the poky-specific files).
+#
+# Printing something to stdout in this file will result in b4 prep --check
+# failing for the currently parsed patch.
+#
+# It checks that all patches in the series make changes to at most one project.
+#
+# When send-auto-cc-cmd is passed:
+#
+# This returns the list of Cc recipients for a patch.
+#
+# When send-auto-to-cmd is passed:
+#
+# This returns the list of To recipients for a patch.
+#
+# This script takes a patch on stdin.
+
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+
+cmd = sys.argv[1]
+
+patch = sys.stdin.readlines()
+
+# The Subject field is used to identify the last patch, as this script is
+# called for each patch. We edit the same file in a series by using the
+# References field's unique identifier to check which projects are modified by
+# earlier patches in the series. To avoid cluttering the disk, the last patch
+# in the list removes that shared file.
+re_subject = re.compile(r'^Subject:.*\[.*PATCH.*\s(\d+)/\1')
+re_ref = re.compile(r'^References: <(.*)>$')
+
+subject = None
+ref = None
+
+if not shutil.which("lsdiff"):
+    print("lsdiff missing from host, please install patchutils", file=sys.stderr)
+    sys.exit(-1)
+
+try:
+    one_patch_series = False
+    for line in patch:
+        subject = re_subject.match(line)
+        if subject:
+            # Handle [PATCH 1/1]
+            if subject.group(1) == "1":
+                one_patch_series = True
+            break
+        if re.match(r'^Subject: .*\[.*PATCH[^/]*\]', line):
+            # A single patch is named [PATCH], but if there is a prefix it
+            # could be [PATCH prefix], so handle everything that doesn't have
+            # a / character, which is used as separator between the current
+            # patch number and the total patch count
+            one_patch_series = True
+            break
+
+    if cmd == "prep-perpatch-check-cmd" and not one_patch_series:
+        for line in patch:
+            ref = re_ref.match(line)
+            if ref:
+                break
+
+        if not ref:
+            print("Failed to find ref to cover letter (References:)...", file=sys.stderr)
+            sys.exit(-2)
+
+        ref = ref.group(1)
+        series_check = pathlib.Path(f".tmp-{ref}")
+
+    patch = "".join(patch)
+
+    if cmd == "send-auto-cc-cmd":
+        # Patches to BitBake documentation should also go to yocto-docs mailing list
+        project_paths = {
+            "yocto-docs": ["bitbake/doc/*"],
+        }
+    else:
+        project_paths = {
+            "bitbake": ["bitbake/*"],
+            "yocto-docs": ["documentation/*"],
+            "poky": [
+                "meta-poky/*",
+                "meta-yocto-bsp/*",
+                "README.hardware.md",
+                "README.poky.md",
+                # scripts/b4-wrapper-poky.py is only run by b4 when in poky
+                # git repo. With that limitation, changes made to .b4-config
+                # can only be for poky's and not OE-Core's as only poky's is
+                # stored in poky git repo.
+                ".b4-config",
+            ],
+        }
+
+    # List of projects touched by this patch
+    projs = []
+
+    # Any file not matched by any path in project_paths means it is from
+    # OE-Core.
+    # When matching some path in project_paths, remove the matched files from
+    # that list.
+    files_left = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"],
+                                         input=patch, text=True)
+    files_left = set(files_left.splitlines())
+
+    for proj, proj_paths in project_paths.items():
+        lsdiff_args = [f"--include={path}" for path in proj_paths]
+        files = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"] + lsdiff_args,
+                                        input=patch, text=True)
+        if len(files):
+            files_left = files_left - set(files.splitlines())
+            projs.append(proj)
+            continue
+
+        # Handle patches made with --no-prefix
+        files = subprocess.check_output(["lsdiff"] + lsdiff_args,
+                                        input=patch, text=True)
+        if len(files):
+            files_left = files_left - set(files.splitlines())
+            projs.append(proj)
+
+    # Catch-all for everything not poky-specific or in bitbake/yocto-docs
+    if len(files_left) and cmd != "send-auto-cc-cmd":
+        projs.append("openembedded-core")
+
+    if cmd == "prep-perpatch-check-cmd":
+        if len(projs) > 1:
+            print(f"Diff spans more than one project ({', '.join(sorted(projs))}), split into multiple commits...",
+                  file=sys.stderr)
+            sys.exit(-3)
+
+        # No need to check other patches in the series as there aren't any
+        if one_patch_series:
+            sys.exit(0)
+
+        # This should be replaced once b4 supports prep-perseries-check-cmd (or something similar)
+
+        if series_check.exists():
+            # NOT race-free if b4 decides to parallelize prep-perpatch-check-cmd
+            series_projs = series_check.read_text().split('\n')
+        else:
+            series_projs = []
+
+        series_projs += projs
+        uniq_series_projs = set(series_projs)
+        # NOT race-free if b4 decides to parallelize prep-perpatch-check-cmd
+        series_check.write_text('\n'.join(uniq_series_projs))
+
+        if len(uniq_series_projs) > 1:
+            print(f"Series spans more than one project ({', '.join(sorted(uniq_series_projs))}), split into multiple series...",
+                  file=sys.stderr)
+            sys.exit(-4)
+    else:  # send-auto-cc-cmd / send-auto-to-cmd
+        ml_projs = {
+            "bitbake": "bitbake-devel@lists.openembedded.org",
+            "yocto-docs": "docs@lists.yoctoproject.org",
+            "poky": "poky@lists.yoctoproject.org",
+            "openembedded-core": "openembedded-core@lists.openembedded.org",
+        }
+
+        print("\n".join([ml_projs[ml] for ml in projs]))
+
+    sys.exit(0)
+finally:
+    # Last patch in the series, clean up the tmp file
+    if subject and ref and series_check.exists():
+        series_check.unlink()
diff --git a/scripts/bitbake-prserv-tool b/scripts/bitbake-prserv-tool
index 80028342b1..0559c4c38a 100755
--- a/scripts/bitbake-prserv-tool
+++ b/scripts/bitbake-prserv-tool
@@ -55,43 +55,6 @@ do_import ()
     return $ret
 }
 
-do_migrate_localcount ()
-{
-    df=`bitbake -R conf/migrate_localcount.conf -e | \
-            grep ^LOCALCOUNT_DUMPFILE= | cut -f2 -d\"`
-    if [ "x${df}" == "x" ];
-    then
-        echo "LOCALCOUNT_DUMPFILE is not defined!"
-        return 1
-    fi
-
-    rm -f $df
-    clean_cache
-    echo "Exporting LOCALCOUNT to AUTOINCs..."
-    bitbake -R conf/migrate_localcount.conf -p
-    [ ! $? -eq 0 ] && echo "Exporting to file $df failed!" && exit 1
-
-    if [ -e $df ];
-    then
-        echo "Exporting to file $df succeeded!"
-    else
-        echo "Exporting to file $df failed!"
-        exit 1
-    fi
-
-    echo "Importing generated AUTOINC entries..."
-    [ -e $df ] && do_import $df
-
-    if [ ! $? -eq 0 ]
-    then
-        echo "Migration from LOCALCOUNT to AUTOINCs failed!"
-        return 1
-    fi
-
-    echo "Migration from LOCALCOUNT to AUTOINCs succeeded!"
-    return 0
-}
-
 [ $# -eq 0 ] && help && exit 1
 
 case $2 in
@@ -110,9 +73,6 @@ export)
 import)
     do_import $2
     ;;
-migrate_localcount)
-    do_migrate_localcount
-    ;;
 *)
     help
     exit 1
diff --git a/scripts/buildstats-diff b/scripts/buildstats-diff
index c9aa76a8fa..df1df432f1 100755
--- a/scripts/buildstats-diff
+++ b/scripts/buildstats-diff
@@ -12,6 +12,7 @@ import glob
 import logging
 import math
 import os
+import pathlib
 import sys
 from operator import attrgetter
 
@@ -251,11 +252,32 @@ Script for comparing buildstats of two separate builds."""
                         "average over them")
     parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[],
                         help="Only include TASK in report. May be specified multiple times")
-    parser.add_argument('buildstats1', metavar='BUILDSTATS1', help="'Left' buildstat")
-    parser.add_argument('buildstats2', metavar='BUILDSTATS2', help="'Right' buildstat")
+    parser.add_argument('buildstats1', metavar='BUILDSTATS1', nargs="?", help="'Left' buildstat")
+    parser.add_argument('buildstats2', metavar='BUILDSTATS2', nargs="?", help="'Right' buildstat")
 
     args = parser.parse_args(argv)
 
+    if args.buildstats1 and args.buildstats2:
+        # Both paths specified
+        pass
+    elif args.buildstats1 or args.buildstats2:
+        # Just one path specified, this is an error
+        parser.print_usage(sys.stderr)
+        print("Either specify two buildstats paths, or none to use the last two paths.", file=sys.stderr)
+        sys.exit(1)
+    else:
+        # No paths specified, try to find the last two buildstats
+        try:
+            buildstats_dir = pathlib.Path(os.environ["BUILDDIR"]) / "tmp" / "buildstats"
+            paths = sorted(buildstats_dir.iterdir())
+            args.buildstats2 = paths.pop()
+            args.buildstats1 = paths.pop()
+            print(f"Comparing {args.buildstats1} -> {args.buildstats2}\n")
+        except KeyError:
+            parser.print_usage(sys.stderr)
+            print("Build environment has not been configured, cannot find buildstats", file=sys.stderr)
+            sys.exit(1)
+
     # We do not need/want to read all buildstats if we just want to look at the
     # package versions
     if args.ver_diff:
diff --git a/scripts/buildstats-summary b/scripts/buildstats-summary
index b10c671b29..cc2a27722a 100755
--- a/scripts/buildstats-summary
+++ b/scripts/buildstats-summary
@@ -87,7 +87,11 @@ def main(argv=None) -> int:
     )
 
     parser.add_argument(
-        "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path
+        "buildstats",
+        metavar="BUILDSTATS",
+        nargs="?",
+        type=pathlib.Path,
+        help="Buildstats file, or latest if not specified",
     )
     parser.add_argument(
         "--sort",
@@ -116,6 +120,16 @@ def main(argv=None) -> int:
 
     args = parser.parse_args(argv)
 
+    # If a buildstats file wasn't specified, try to find the last one
+    if not args.buildstats:
+        try:
+            builddir = pathlib.Path(os.environ["BUILDDIR"])
+            buildstats_dir = builddir / "tmp" / "buildstats"
+            args.buildstats = sorted(buildstats_dir.iterdir())[-1]
+        except KeyError:
+            print("Build environment has not been configured, cannot find buildstats")
+            return 1
+
     bs = read_buildstats(args.buildstats)
     dump_buildstats(args, bs)
 
diff --git a/scripts/clean-hashserver-database b/scripts/clean-hashserver-database
new file mode 100755
index 0000000000..9fa162c981
--- /dev/null
+++ b/scripts/clean-hashserver-database
@@ -0,0 +1,77 @@
+#!/bin/bash
+set -euo pipefail
+
+SSTATE_DIR=""
+BB_HASHCLIENT=""
+BB_HASHSERVER=""
+
+ALIVE_DB_MARK="alive"
+CLEAN_DB="false"
+THRESHOLD_AGE="3600"
+
+function help() {
+    cat <<HELP_TEXT
+Usage: $0 --sstate-dir path --hashclient path --hashserver-address address \
+[--mark value] [--clean-db] [--threshold-age seconds]
+
+Auxiliary script to remove unused or no longer relevant entries from the hash
+equivalence database, based on the files available in the sstate directory.
+
+  -h | --help) Show this help message and exit
+  -a | --hashserver-address) bitbake-hashserver address
+  -c | --hashclient) Path to bitbake-hashclient
+  -m | --mark) Marker string to mark database entries
+  -s | --sstate-dir) Path to the sstate dir
+  -t | --threshold-age) Remove unused entries older than SECONDS (default: 3600)
+  --clean-db) Remove all unmarked and unused entries from the database
+HELP_TEXT
+}
+
+function argument_parser() {
+    while [ $# -gt 0 ]; do
+        case "$1" in
+            -h | --help) help; exit 0 ;;
+            -a | --hashserver-address) BB_HASHSERVER="$2"; shift ;;
+            -c | --hashclient) BB_HASHCLIENT="$2"; shift ;;
+            -m | --mark) ALIVE_DB_MARK="$2"; shift ;;
+            -s | --sstate-dir) SSTATE_DIR="$2"; shift ;;
+            -t | --threshold-age) THRESHOLD_AGE="$2"; shift ;;
+            --clean-db) CLEAN_DB="true";;
+            *)
+                echo "Argument '$1' is not supported" >&2
+                help >&2
+                exit 1
+                ;;
+        esac
+        shift
+    done
+
+    function validate_mandatory_argument() {
+        local var_value="$1"
+        local error_message="$2"
+
+        if [ -z "$var_value" ]; then
+            echo "$error_message"
+            help >&2
+            exit 1
+        fi
+    }
+
+    validate_mandatory_argument "$SSTATE_DIR" "Please provide the path to the sstate dir."
+    validate_mandatory_argument "$BB_HASHCLIENT" "Please provide the path to bitbake-hashclient."
+    validate_mandatory_argument "$BB_HASHSERVER" "Please provide the address of bitbake-hashserver."
+}
+
+# -- main code --
+argument_parser "$@"
+
+# Mark all db sstate hashes
+find "$SSTATE_DIR" -name "*.tar.zst" | \
+sed 's/.*:\([^_]*\)_.*/unihash \1/' | \
+$BB_HASHCLIENT --address "$BB_HASHSERVER" gc-mark-stream "$ALIVE_DB_MARK"
+
+# Remove unmarked and unused entries
+if [ "$CLEAN_DB" = "true" ]; then
+    $BB_HASHCLIENT --address "$BB_HASHSERVER" gc-sweep "$ALIVE_DB_MARK"
+    $BB_HASHCLIENT --address "$BB_HASHSERVER" clean-unused "$THRESHOLD_AGE"
+fi
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
index 4e194dee3f..13cf12a33f 100755
--- a/scripts/contrib/convert-spdx-licenses.py
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -93,7 +93,7 @@ license_map = {
 "Nauman" : "Naumen",
 "tcl" : "TCL",
 "vim" : "Vim",
-"SGIv1" : "SGI-1",
+"SGIv1" : "SGI-OpenGL",
 }
 
 def processfile(fn):
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py
new file mode 100755
index 0000000000..829cc4cd30
--- /dev/null
+++ b/scripts/contrib/improve_kernel_cve_report.py
@@ -0,0 +1,467 @@
+#! /usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# The script uses another source of CVE information from linux-vulns
+# to enrich the cve-summary from cve-check or vex.
+# It can also use the list of compiled files from the kernel spdx to ignore CVEs
+# that are not affected since the files are not compiled.
+#
+# It creates a new json file with updated CVE information
+#
+# Compiled files can be extracted by adding the following to local.conf
+# SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1"
+#
+# Tested with the following CVE sources:
+# - https://git.kernel.org/pub/scm/linux/security/vulns.git
+# - https://github.com/CVEProject/cvelistV5
+#
+# Example:
+# python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns
+# python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json
+#
+# SPDX-License-Identifier: GPLv2
+
+import argparse
+import json
+import sys
+import logging
+import glob
+import os
+import pathlib
+from packaging.version import Version
+
+def is_linux_cve(cve_info):
+    '''Return true if the CVE belongs to Linux'''
+    if "affected" not in cve_info["containers"]["cna"]:
+        return False
+    for affected in cve_info["containers"]["cna"]["affected"]:
+        if "product" not in affected:
+            return False
+        if affected["product"] == "Linux" and affected["vendor"] == "Linux":
+            return True
+    return False
+
+def get_kernel_cves(datadir, compiled_files, version):
+    """
+    Get CVEs for the kernel
+    """
+    cves = {}
+
+    check_config = len(compiled_files) > 0
+
+    base_version = Version(f"{version.major}.{version.minor}")
+
+    # Check all CVEs from kernel vulns
+    pattern = os.path.join(datadir, '**', "CVE-*.json")
+    cve_files = glob.glob(pattern, recursive=True)
+    not_applicable_config = 0
+    fixed_as_later_backport = 0
+    vulnerable = 0
+    not_vulnerable = 0
+    for cve_file in sorted(cve_files):
+        cve_info = {}
+        with open(cve_file, "r", encoding='ISO-8859-1') as f:
+            cve_info = json.load(f)
+
+        if len(cve_info) == 0:
+            logging.error("No valid data in %s. Aborting", cve_file)
+            break
+
+        if not is_linux_cve(cve_info):
+            continue
+        cve_id = os.path.basename(cve_file)[:-5]
+        description = cve_info["containers"]["cna"]["descriptions"][0]["value"]
+        if cve_file.find("rejected") >= 0:
+            logging.debug("%s is rejected by the CNA", cve_id)
+            cves[cve_id] = {
+                "id": cve_id,
+                "status": "Ignored",
+                "detail": "rejected",
+                "summary": description,
+                "description": "Rejected by CNA"
+            }
+            continue
+        if any(elem in cve_file for elem in ["review", "reserved", "testing"]):
+            continue
+
+        is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version)
+
+        logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected)
+
+        if is_vulnerable is None:
+            logging.warning("%s doesn't have good metadata", cve_id)
+        if is_vulnerable:
+            is_affected = True
+            affected_files = []
+            if check_config:
+                is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info)
+
+            if not is_affected and len(affected_files) > 0:
+                logging.debug(
+                    "%s - not applicable configuration since affected files not compiled: %s",
+                    cve_id, affected_files)
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Ignored",
+                    "detail": "not-applicable-config",
+                    "summary": description,
+                    "description": f"Source code not compiled by config. {affected_files}"
+                }
+                not_applicable_config += 1
+            # Check if we have a backport
+            else:
+                if not better_match_last:
+                    fixed_in = last_affected
+                else:
+                    fixed_in = better_match_last
+                logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in)
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Unpatched",
+                    "detail": "version-in-range",
+                    "summary": description,
+                    "description": f"Needs backporting (fixed from {fixed_in})"
+                }
+                vulnerable += 1
+                if (better_match_last and
+                        Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version):
+                    fixed_as_later_backport += 1
+        # Not vulnerable
+        else:
+            if not first_affected:
+                logging.debug("%s - not known affected %s",
+                              cve_id,
+                              better_match_last)
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Patched",
+                    "detail": "version-not-in-range",
+                    "summary": description,
+                    "description": "No CPE match"
+                }
+                not_vulnerable += 1
+                continue
+            backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}")
+            if version < first_affected:
+                logging.debug('%s - fixed-version: only affects %s onwards',
+                              cve_id,
+                              first_affected)
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Patched",
+                    "detail": "fixed-version",
+                    "summary": description,
+                    "description": f"only affects {first_affected} onwards"
+                }
+                not_vulnerable += 1
+            elif last_affected <= version:
+                logging.debug("%s - fixed-version: Fixed from version %s",
+                              cve_id,
+                              last_affected)
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Patched",
+                    "detail": "fixed-version",
+                    "summary": description,
+                    "description": f"fixed-version: Fixed from version {last_affected}"
+                }
+                not_vulnerable += 1
+            elif backport_base == base_version:
+                logging.debug("%s - cpe-stable-backport: Backported in %s",
+                              cve_id,
+                              better_match_last)
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Patched",
+                    "detail": "cpe-stable-backport",
+                    "summary": description,
+                    "description": f"Backported in {better_match_last}"
+                }
+                not_vulnerable += 1
+            else:
+                logging.debug("%s - version not affected %s", cve_id, str(affected_versions))
+                cves[cve_id] = {
+                    "id": cve_id,
+                    "status": "Patched",
+                    "detail": "version-not-in-range",
+                    "summary": description,
+                    "description": f"Range {affected_versions}"
+                }
+                not_vulnerable += 1
+
+    logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config)
+    logging.info("Total CVEs not vulnerable due to version-not-in-range: %d", not_vulnerable)
+    logging.info("Total vulnerable CVEs: %d", vulnerable)
+
+    logging.info("Total CVEs already backported in %s: %s", base_version,
+                 fixed_as_later_backport)
+    return cves
+
+def read_spdx(spdx_file):
+    '''Open the SPDX file and extract the compiled files'''
+    with open(spdx_file, 'r', encoding='ISO-8859-1') as f:
+        spdx = json.load(f)
+        if "spdxVersion" in spdx:
+            if spdx["spdxVersion"] == "SPDX-2.2":
+                return read_spdx2(spdx)
+        if "@graph" in spdx:
+            return read_spdx3(spdx)
+        return []
+
+def read_spdx2(spdx):
+    '''
+    Read SPDX 2 compiled files from the SPDX document
+    '''
+    cfiles = set()
+    if 'files' not in spdx:
+        return cfiles
+    for item in spdx['files']:
+        for ftype in item['fileTypes']:
+            if ftype == "SOURCE":
+                filename = item["fileName"][item["fileName"].find("/")+1:]
+                cfiles.add(filename)
+    return cfiles
+
+def read_spdx3(spdx):
+    '''
+    Read SPDX 3 compiled files from the SPDX document
+    '''
+    cfiles = set()
+    for item in spdx["@graph"]:
+        if "software_primaryPurpose" not in item:
+            continue
+        if item["software_primaryPurpose"] == "source":
+            filename = item['name'][item['name'].find("/")+1:]
+            cfiles.add(filename)
+    return cfiles
+
+def check_kernel_compiled_files(compiled_files, cve_info):
+    """
+    Return whether a CVE affects us, depending on the compiled files
+    """
+    files_affected = set()
+    is_affected = False
+
+    for item in cve_info['containers']['cna']['affected']:
+        if "programFiles" in item:
+            for f in item['programFiles']:
+                if f not in files_affected:
+                    files_affected.add(f)
+
+    if len(files_affected) > 0:
+        for f in files_affected:
+            if f in compiled_files:
+                logging.debug("File match: %s", f)
+                is_affected = True
+    return is_affected, files_affected
+
+def get_cpe_applicability(cve_info, v):
+    '''
+    Check if version is affected and return affected versions
+    '''
+    base_branch = Version(f"{v.major}.{v.minor}")
+    affected = []
+    if 'cpeApplicability' not in cve_info["containers"]["cna"]:
+        return None, None, None, None, None, None
+
+    for nodes in cve_info["containers"]["cna"]["cpeApplicability"]:
+        for node in nodes.values():
+            vulnerable = False
+            matched_branch = False
+            first_affected = Version("5000")
+            last_affected = Version("0")
+            better_match_first = Version("0")
+            better_match_last = Version("5000")
+
+            if len(node[0]['cpeMatch']) == 0:
+                first_affected = None
+                last_affected = None
+                better_match_first = None
+                better_match_last = None
+
+            for cpe_match in node[0]['cpeMatch']:
+                version_start_including = Version("0")
+                version_end_excluding = Version("0")
+                if 'versionStartIncluding' in cpe_match:
+                    version_start_including = Version(cpe_match['versionStartIncluding'])
+                else:
+                    version_start_including = Version("0")
+                # if versionEndExcluding is missing we are in a branch, which is not fixed.
+                if "versionEndExcluding" in cpe_match:
+                    version_end_excluding = Version(cpe_match["versionEndExcluding"])
+                else:
+                    version_end_excluding = Version(
+                        f"{version_start_including.major}.{version_start_including.minor}.5000"
+                    )
+                affected.append(f" {version_start_including}-{version_end_excluding}")
+                # Detect if versionEnd is fixed in the base branch. It has precedence over the rest
+                branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}")
+                if branch_end == base_branch:
+                    if version_start_including <= v < version_end_excluding:
+                        vulnerable = cpe_match['vulnerable']
+                    # If we don't match in our branch, we are not vulnerable,
+                    # since we have a backport
+                    matched_branch = True
+                    better_match_first = version_start_including
+                    better_match_last = version_end_excluding
+                if version_start_including <= v < version_end_excluding and not matched_branch:
+                    if version_end_excluding < better_match_last:
+                        better_match_first = max(version_start_including, better_match_first)
+                        better_match_last = min(better_match_last, version_end_excluding)
+                    vulnerable = cpe_match['vulnerable']
+                    matched_branch = True
+
+                first_affected = min(version_start_including, first_affected)
+                last_affected = max(version_end_excluding, last_affected)
+            # Not a better match, we use the first and last affected instead of the fake .5000
+            if vulnerable and better_match_last == Version(f"{base_branch}.5000"):
+                better_match_last = last_affected
+                better_match_first = first_affected
+    return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected
+
+def copy_data(old, new):
+    '''Update dictionary with new entries, while keeping the old ones'''
+    for k in new.keys():
+        old[k] = new[k]
+    return old
+
+# Function taken from cve_check.bbclass. Adapted to cve fields
+def cve_update(cve_data, cve, entry):
+    # If no entry, just add it
+    if cve not in cve_data:
+        cve_data[cve] = entry
+        return
+    # If we are updating, there might be a change in the status
+    if cve_data[cve]['status'] == "Unknown":
+        cve_data[cve] = copy_data(cve_data[cve], entry)
+        return
+    if cve_data[cve]['status'] == entry['status']:
+        return
+    if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched":
+        logging.warning("CVE entry %s updated from Patched to Unpatched from the scan result", cve)
+        cve_data[cve] = copy_data(cve_data[cve], entry)
+        return
+    if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched":
+        logging.warning("CVE entry %s updated from Unpatched to Patched from the scan result", cve)
+        cve_data[cve] = copy_data(cve_data[cve], entry)
+        return
+    # If we have an "Ignored", it has priority
+    if cve_data[cve]['status'] == "Ignored":
+        logging.debug("CVE %s not updating because Ignored", cve)
+        return
+    # An incoming "Ignored" also takes priority
+    if entry['status'] == "Ignored":
+        cve_data[cve] = copy_data(cve_data[cve], entry)
+        logging.debug("CVE entry %s updated from Unpatched to Ignored", cve)
+        return
+    logging.warning("Unhandled CVE entry update for %s from %s %s to %s %s",
+                    cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail'])
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Update cve-summary with kernel compiled files and kernel CVE information"
+    )
+    parser.add_argument(
+        "-s",
+        "--spdx",
+        help="SPDX2/3 for the kernel. Needs to include compiled sources",
+    )
+    parser.add_argument(
+        "--datadir",
+        type=pathlib.Path,
+        help="Directory where CVE data is",
+        required=True
+    )
+    parser.add_argument(
+        "--old-cve-report",
+        help="CVE report to update. (Optional)",
+    )
+    parser.add_argument(
+        "--kernel-version",
+        help="Kernel version. Needed if old cve_report is not provided (Optional)",
+        type=Version
+    )
+    parser.add_argument(
+        "--new-cve-report",
+        help="Output file",
+        default="cve-summary-enhance.json"
+    )
+    parser.add_argument(
+        "-D",
+        "--debug",
+        help="Enable debug",
+        action="store_true")
+
+    args = parser.parse_args()
+
+    if args.debug:
+        log_level = logging.DEBUG
+    else:
+        log_level = logging.INFO
+    logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level)
+
+    if not args.kernel_version and not args.old_cve_report:
+        parser.error("either --kernel-version or --old-cve-report is needed")
+        return -1
+
+    # by default we don't check the compiled files, unless provided
+    compiled_files = []
+    if args.spdx:
+        compiled_files = read_spdx(args.spdx)
+        logging.info("Total compiled files %d", len(compiled_files))
+
+    if args.old_cve_report:
+        with open(args.old_cve_report, encoding='ISO-8859-1') as f:
+            cve_report = json.load(f)
+    else:
+        # If a summary is not provided, we create one
+        cve_report = {
+            "version": "1",
+            "package": [
+                {
+                    "name": "linux-yocto",
+                    "version": str(args.kernel_version),
+                    "products": [
+                        {
+                            "product": "linux_kernel",
+                            "cvesInRecord": "Yes"
+                        }
+                    ],
+                    "issue": []
+                }
+            ]
+        }
+
+    for pkg in cve_report['package']:
+        is_kernel = False
+        for product in pkg['products']:
+            if product['product'] == "linux_kernel":
+                is_kernel = True
+        if not is_kernel:
+            continue
+
+        kernel_cves = get_kernel_cves(args.datadir,
+                                      compiled_files,
+                                      Version(pkg["version"]))
+        logging.info("Total kernel CVEs from kernel CNA: %s", len(kernel_cves))
+        cves = {issue["id"]: issue for issue in pkg["issue"]}
+        logging.info("Total kernel CVEs before processing: %s", len(cves))
+
+        for cve in kernel_cves:
+            cve_update(cves, cve, kernel_cves[cve])
+
+        pkg["issue"] = []
+        for cve in sorted(cves):
+            pkg["issue"].extend([cves[cve]])
+        logging.info("Total kernel CVEs after processing: %s", len(pkg['issue']))
+
+    with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f:
+        json.dump(cve_report, f, indent=2)
+
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
+
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh
new file mode 100755
index 0000000000..31caaf339d
--- /dev/null
+++ b/scripts/contrib/make-spdx-bindings.sh
@@ -0,0 +1,12 @@
+#! /bin/sh
+#
+# SPDX-License-Identifier: MIT
+
+THIS_DIR="$(dirname "$0")"
+
+VERSION="3.0.1"
+
+shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \
+    --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \
+    --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \
+    python -o "$THIS_DIR/../../meta/lib/oe/spdx30.py"
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore
new file mode 100644
index 0000000000..285851c984
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/.gitignore
@@ -0,0 +1,8 @@
+*.spdx.json
+*.pyc
+*.bak
+*.swp
+*.swo
+*.swn
+venv/*
+.venv/*
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md
new file mode 100644
index 0000000000..44f76eacd8
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/README.md
@@ -0,0 +1,24 @@
+# OE Image Files from SBoM
+
+This is an example Python script that lists the packaged files with their
+checksums, based on the SPDX 3.0.1 SBoM.
+
+It can be used as a template for other programs that investigate output based
+on OE SPDX SBoMs.
+
+## Installation
+
+This project can be installed using a virtual environment:
+```
+python3 -m venv .venv
+. .venv/bin/activate
+python3 -m pip install -e '.[dev]'
+```
+
+## Usage
+
+After installing, the `oe-image-files` program can be used to show the files, e.g.:
+
+```
+oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json
+```
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml
new file mode 100644
index 0000000000..3fab5dd605
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml
@@ -0,0 +1,23 @@
+[project]
+name = "oe-image-files"
+description = "Displays all packaged files on the root file system"
+dynamic = ["version"]
+requires-python = ">= 3.8"
+readme = "README.md"
+
+dependencies = [
+  "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179",
+]
+
+[project.scripts]
+oe-image-files = "oe_image_files:main"
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.version]
+path = "src/oe_image_files/version.py"
+
+[tool.hatch.metadata]
+allow-direct-references = true
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
new file mode 100644
index 0000000000..c28a133f2d
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
@@ -0,0 +1 @@
+from .main import main
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
new file mode 100644
index 0000000000..8476bf6369
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+import argparse
+from pathlib import Path
+
+
+from spdx_python_model import v3_0_1 as spdx_3_0_1
+from .version import VERSION
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Show the packaged files and checksums in an OE image from the SPDX SBoM"
+    )
+    parser.add_argument("file", help="SPDX 3 input file", type=Path)
+    parser.add_argument("--version", "-V", action="version", version=VERSION)
+
+    args = parser.parse_args()
+
+    # Load SPDX data from file into a new object set
+    objset = spdx_3_0_1.SHACLObjectSet()
+    with args.file.open("r") as f:
+        d = spdx_3_0_1.JSONLDDeserializer()
+        d.read(f, objset)
+
+    # Find the top level SPDX Document object
+    for o in objset.foreach_type(spdx_3_0_1.SpdxDocument):
+        doc = o
+        break
+    else:
+        print("ERROR: No SPDX Document found!")
+        return 1
+
+    # Find the root SBoM in the document
+    for o in doc.rootElement:
+        if isinstance(o, spdx_3_0_1.software_Sbom):
+            sbom = o
+            break
+    else:
+        print("ERROR: SBoM not found in document")
+        return 1
+
+    # Find the root file system package in the SBoM
+    for o in sbom.rootElement:
+        if (
+            isinstance(o, spdx_3_0_1.software_Package)
+            and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive
+        ):
+            root_package = o
+            break
+    else:
+        print("ERROR: Package not found in document")
+        return 1
+
+    # Find all relationships of type "contains" that go FROM the root file
+    # system
+    files = []
+    for rel in objset.foreach_type(spdx_3_0_1.Relationship):
+        if rel.relationshipType != spdx_3_0_1.RelationshipType.contains:
+            continue
+
+        if rel.from_ is not root_package:
+            continue
+
+        # Iterate over all files in the TO of the relationship
+        for o in rel.to:
+            if not isinstance(o, spdx_3_0_1.software_File):
+                continue
+
+            # Find the SHA-256 hash of the file (if any)
+            for h in o.verifiedUsing:
+                if (
+                    isinstance(h, spdx_3_0_1.Hash)
+                    and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256
+                ):
+                    files.append((o.name, h.hashValue))
+                    break
+            else:
+                files.append((o.name, ""))
+
+    # Print files
+    files.sort(key=lambda x: x[0])
+    for name, hash_val in files:
+        print(f"{name} - {hash_val}")
+
+    return 0
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
new file mode 100644
index 0000000000..901e5110b2
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
@@ -0,0 +1 @@
+VERSION = "0.0.1"
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index bceae06561..d8d7b214e5 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -14,6 +14,10 @@ import pathlib
 import re
 import subprocess
 
+import sys
+sys.path.append(os.path.join(sys.path[0], '../../meta/lib'))
+import oe.qa
+
 # TODO
 # - option to just list all broken files
 # - test suite
@@ -47,7 +51,7 @@ def blame_patch(patch):
     return subprocess.check_output(("git", "log",
                                     "--follow", "--find-renames", "--diff-filter=A",
                                     "--format=%s (%aN <%aE>)",
-                                    "--", patch)).decode("utf-8").splitlines()
+                                    "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines()
 
 def patchreview(patches):
 
@@ -78,12 +82,11 @@ def patchreview(patches):
         else:
             result.missing_sob = True
 
-
         # Find the Upstream-Status tag
         match = status_re.search(content)
         if match:
-            value = match.group(1)
-            if value != "Upstream-Status:":
+            value = oe.qa.check_upstream_status(patch)
+            if value:
                 result.malformed_upstream_status = value
 
             value = match.group(2).lower()
diff --git a/scripts/cve-json-to-text.py b/scripts/cve-json-to-text.py
new file mode 100755
index 0000000000..8d309b37e5
--- /dev/null
+++ b/scripts/cve-json-to-text.py
@@ -0,0 +1,146 @@
+#!/bin/env python3
+# SPDX-FileCopyrightText: OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+
+# CVE results conversion script: JSON format to text
+# Derived from cve-report.py from Oniro (MIT, by Huawei Inc)
+
+import sys
+import getopt
+
+infile = "in.json"
+outfile = "out.txt"
+
+
+def show_syntax_and_exit(code):
+    """
+    Show the program syntax and exit with an error
+    Arguments:
+    code: the error code to return
+    """
+    print("Syntax: %s [-h] [-i inputJSONfile] [-o outputfile]" % sys.argv[0])
+    sys.exit(code)
+
+
+def exit_error(code, message):
+    """
+    Show the error message and exit with an error
+    Arguments:
+    code: the error code to return
+    message: the message to show
+    """
+    print("Error: %s" % message)
+    sys.exit(code)
+
+
+def parse_args(argv):
+    """
+    Parse the program arguments, put options in global variables
+    Arguments:
+    argv: program arguments
+    """
+    global infile, outfile
+    try:
+        opts, args = getopt.getopt(
+            argv, "hi:o:", ["help", "input=", "output="]
+        )
+    except getopt.GetoptError:
+        show_syntax_and_exit(1)
+    for opt, arg in opts:
+        if opt in ("-h", "--help"):
+            show_syntax_and_exit(0)
+        elif opt in ("-i", "--input"):
+            infile = arg
+        elif opt in ("-o", "--output"):
+            outfile = arg
+
+def load_json(filename):
+    """
+    Load the JSON file, return the resulting dictionary
+    Arguments:
+    filename: the file to open
+    Returns:
+    Parsed file as a dictionary
+    """
+    import json
+
+    out = {}
+    try:
+        with open(filename, "r") as f:
+            out = json.load(f)
+    except FileNotFoundError:
+        exit_error(1, "Input file (%s) not found" % (filename))
+    except json.decoder.JSONDecodeError as error:
+        exit_error(1, "Malformed JSON file: %s" % str(error))
+    return out
+
+
+def process_data(filename, data):
+    """
+    Write the resulting text report, with one block for each issue
+    Arguments:
+    filename: the file to write to
+    data: dictionary from parsing the JSON file
+    Returns:
+    None
+    """
+    if "version" not in data or data["version"] != "1":
+        exit_error(1, "Unrecognized format version number")
+    if "package" not in data:
+        exit_error(1, "Mandatory 'package' key not found")
+
+    lines = ""
+    total_issue_count = 0
+    for package in data["package"]:
+        package_info = ""
+        keys_in_package = {"name", "layer", "version", "issue"}
+        if keys_in_package - package.keys():
+            exit_error(
+                1,
+                "Missing a mandatory key in package: %s"
+                % (keys_in_package - package.keys()),
+            )
+
+        package_info += "LAYER: %s\n" % package["layer"]
+        package_info += "PACKAGE NAME: %s\n" % package["name"]
+        package_info += "PACKAGE VERSION: %s\n" % package["version"]
+
+        for issue in package["issue"]:
+            keys_in_issue = {"id", "status", "detail"}
+            if keys_in_issue - issue.keys():
+                print("Warning: Missing keys %s in 'issue' for the package '%s'"
+                      % (keys_in_issue - issue.keys(), package["name"]))
+
+            lines += package_info
+            lines += "CVE: %s\n" % issue["id"]
+            lines += "CVE STATUS: %s\n" % issue["status"]
+            lines += "CVE DETAIL: %s\n" % issue["detail"]
+            if "description" in issue:
+                lines += "CVE DESCRIPTION: %s\n" % issue["description"]
+            if "summary" in issue:
+                lines += "CVE SUMMARY: %s\n" % issue["summary"]
+            if "scorev2" in issue:
+                lines += "CVSS v2 BASE SCORE: %s\n" % issue["scorev2"]
+            if "scorev3" in issue:
+                lines += "CVSS v3 BASE SCORE: %s\n" % issue["scorev3"]
+            if "scorev4" in issue:
+                lines += "CVSS v4 BASE SCORE: %s\n" % issue["scorev4"]
+            if "vector" in issue:
+                lines += "VECTOR: %s\n" % issue["vector"]
+            if "vectorString" in issue:
+                lines += "VECTORSTRING: %s\n" % issue["vectorString"]
+            lines += "MORE INFORMATION: https://nvd.nist.gov/vuln/detail/%s\n" % issue["id"]
+            lines += "\n"
+
+    with open(filename, "w") as f:
+        f.write(lines)
+
+def main(argv):
+    parse_args(argv)
+    data = load_json(infile)
+    process_data(outfile, data)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/scripts/devtool b/scripts/devtool
index 60ea3e8298..39cebec0d8 100755
--- a/scripts/devtool
+++ b/scripts/devtool
@@ -7,19 +7,17 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import dataclasses
10import sys 11import sys
11import os 12import os
12import argparse 13import argparse
13import glob 14import glob
14import re 15import re
15import configparser 16import configparser
16import subprocess
17import logging 17import logging
18 18
19basepath = '' 19# This can be removed once our minimum is Python 3.9: https://docs.python.org/3/whatsnew/3.9.html#type-hinting-generics-in-standard-collections
20workspace = {} 20from typing import List
21config = None
22context = None
23 21
24 22
25scripts_path = os.path.dirname(os.path.realpath(__file__)) 23scripts_path = os.path.dirname(os.path.realpath(__file__))
@@ -30,16 +28,16 @@ import scriptutils
30import argparse_oe 28import argparse_oe
31logger = scriptutils.logger_create('devtool') 29logger = scriptutils.logger_create('devtool')
32 30
33plugins = []
34 31
35 32class ConfigHandler:
36class ConfigHandler(object): 33 basepath = None
37 config_file = '' 34 config_file = ''
38 config_obj = None 35 config_obj = None
39 init_path = '' 36 init_path = ''
40 workspace_path = '' 37 workspace_path = ''
41 38
42 def __init__(self, filename): 39 def __init__(self, basepath, filename):
40 self.basepath = basepath
43 self.config_file = filename 41 self.config_file = filename
44 self.config_obj = configparser.ConfigParser() 42 self.config_obj = configparser.ConfigParser()
45 43
@@ -47,7 +45,7 @@ class ConfigHandler(object):
47 try: 45 try:
48 ret = self.config_obj.get(section, option) 46 ret = self.config_obj.get(section, option)
49 except (configparser.NoOptionError, configparser.NoSectionError): 47 except (configparser.NoOptionError, configparser.NoSectionError):
50 if default != None: 48 if default is not None:
51 ret = default 49 ret = default
52 else: 50 else:
53 raise 51 raise
@@ -59,14 +57,14 @@ class ConfigHandler(object):
59 57
60 if self.config_obj.has_option('General', 'init_path'): 58 if self.config_obj.has_option('General', 'init_path'):
61 pth = self.get('General', 'init_path') 59 pth = self.get('General', 'init_path')
62 self.init_path = os.path.join(basepath, pth) 60 self.init_path = os.path.join(self.basepath, pth)
63 if not os.path.exists(self.init_path): 61 if not os.path.exists(self.init_path):
64 logger.error('init_path %s specified in config file cannot be found' % pth) 62 logger.error('init_path %s specified in config file cannot be found' % pth)
65 return False 63 return False
66 else: 64 else:
67 self.config_obj.add_section('General') 65 self.config_obj.add_section('General')
68 66
69 self.workspace_path = self.get('General', 'workspace_path', os.path.join(basepath, 'workspace')) 67 self.workspace_path = self.get('General', 'workspace_path', os.path.join(self.basepath, 'workspace'))
70 return True 68 return True
71 69
72 70
@@ -81,27 +79,29 @@ class ConfigHandler(object):
81 self.config_obj.add_section(section) 79 self.config_obj.add_section(section)
82 self.config_obj.set(section, option, value) 80 self.config_obj.set(section, option, value)
83 81
82
83@dataclasses.dataclass
84class Context: 84class Context:
85 def __init__(self, **kwargs): 85 fixed_setup: bool
86 self.__dict__.update(kwargs) 86 config: ConfigHandler
87 pluginpaths: List[str]
87 88
88 89
89def read_workspace(): 90def read_workspace(basepath, context):
90 global workspace
91 workspace = {} 91 workspace = {}
92 if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')): 92 if not os.path.exists(os.path.join(context.config.workspace_path, 'conf', 'layer.conf')):
93 if context.fixed_setup: 93 if context.fixed_setup:
94 logger.error("workspace layer not set up") 94 logger.error("workspace layer not set up")
95 sys.exit(1) 95 sys.exit(1)
96 else: 96 else:
97 logger.info('Creating workspace layer in %s' % config.workspace_path) 97 logger.info('Creating workspace layer in %s' % context.config.workspace_path)
98 _create_workspace(config.workspace_path, config, basepath) 98 _create_workspace(context.config.workspace_path, basepath)
99 if not context.fixed_setup: 99 if not context.fixed_setup:
100 _enable_workspace_layer(config.workspace_path, config, basepath) 100 _enable_workspace_layer(context.config.workspace_path, context.config, basepath)
101 101
102 logger.debug('Reading workspace in %s' % config.workspace_path) 102 logger.debug('Reading workspace in %s' % context.config.workspace_path)
103 externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$') 103 externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$')
104 for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')): 104 for fn in glob.glob(os.path.join(context.config.workspace_path, 'appends', '*.bbappend')):
105 with open(fn, 'r') as f: 105 with open(fn, 'r') as f:
106 pnvalues = {} 106 pnvalues = {}
107 pn = None 107 pn = None
@@ -112,7 +112,7 @@ def read_workspace():
112 pn = res.group(2) or recipepn 112 pn = res.group(2) or recipepn
113 # Find the recipe file within the workspace, if any 113 # Find the recipe file within the workspace, if any
114 bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*') 114 bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*')
115 recipefile = glob.glob(os.path.join(config.workspace_path, 115 recipefile = glob.glob(os.path.join(context.config.workspace_path,
116 'recipes', 116 'recipes',
117 recipepn, 117 recipepn,
118 bbfile)) 118 bbfile))
@@ -126,13 +126,15 @@ def read_workspace():
126 if pnvalues: 126 if pnvalues:
127 if not pn: 127 if not pn:
128 raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. " 128 raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. "
129 "Maybe still using old syntax?" % config.workspace_path) 129 "Maybe still using old syntax?" % context.config.workspace_path)
130 if not pnvalues.get('srctreebase', None): 130 if not pnvalues.get('srctreebase', None):
131 pnvalues['srctreebase'] = pnvalues['srctree'] 131 pnvalues['srctreebase'] = pnvalues['srctree']
132 logger.debug('Found recipe %s' % pnvalues) 132 logger.debug('Found recipe %s' % pnvalues)
133 workspace[pn] = pnvalues 133 workspace[pn] = pnvalues
134 134
135def create_workspace(args, config, basepath, workspace): 135 return workspace
136
137def create_workspace(args, config, basepath, _workspace):
136 if args.layerpath: 138 if args.layerpath:
137 workspacedir = os.path.abspath(args.layerpath) 139 workspacedir = os.path.abspath(args.layerpath)
138 else: 140 else:
@@ -140,12 +142,12 @@ def create_workspace(args, config, basepath, workspace):
140 layerseries = None 142 layerseries = None
141 if args.layerseries: 143 if args.layerseries:
142 layerseries = args.layerseries 144 layerseries = args.layerseries
143 _create_workspace(workspacedir, config, basepath, layerseries) 145 _create_workspace(workspacedir, basepath, layerseries)
144 if not args.create_only: 146 if not args.create_only:
145 _enable_workspace_layer(workspacedir, config, basepath) 147 _enable_workspace_layer(workspacedir, config, basepath)
146 148
147def _create_workspace(workspacedir, config, basepath, layerseries=None): 149def _create_workspace(workspacedir, basepath, layerseries=None):
148 import bb 150 import bb.utils
149 151
150 confdir = os.path.join(workspacedir, 'conf') 152 confdir = os.path.join(workspacedir, 'conf')
151 if os.path.exists(os.path.join(confdir, 'layer.conf')): 153 if os.path.exists(os.path.join(confdir, 'layer.conf')):
@@ -190,7 +192,7 @@ def _create_workspace(workspacedir, config, basepath, layerseries=None):
190 192
191def _enable_workspace_layer(workspacedir, config, basepath): 193def _enable_workspace_layer(workspacedir, config, basepath):
192 """Ensure the workspace layer is in bblayers.conf""" 194 """Ensure the workspace layer is in bblayers.conf"""
193 import bb 195 import bb.utils
194 bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf') 196 bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf')
195 if not os.path.exists(bblayers_conf): 197 if not os.path.exists(bblayers_conf):
196 logger.error('Unable to find bblayers.conf') 198 logger.error('Unable to find bblayers.conf')
@@ -209,15 +211,9 @@ def _enable_workspace_layer(workspacedir, config, basepath):
209 211
210 212
211def main(): 213def main():
212 global basepath
213 global config
214 global context
215
216 if sys.getfilesystemencoding() != "utf-8": 214 if sys.getfilesystemencoding() != "utf-8":
217 sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") 215 sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
218 216
219 context = Context(fixed_setup=False)
220
221 # Default basepath 217 # Default basepath
222 basepath = os.path.dirname(os.path.abspath(__file__)) 218 basepath = os.path.dirname(os.path.abspath(__file__))
223 219
@@ -242,21 +238,23 @@ def main():
242 elif global_args.quiet: 238 elif global_args.quiet:
243 logger.setLevel(logging.ERROR) 239 logger.setLevel(logging.ERROR)
244 240
241 is_fixed_setup = False
242
245 if global_args.basepath: 243 if global_args.basepath:
246 # Override 244 # Override
247 basepath = global_args.basepath 245 basepath = global_args.basepath
248 if os.path.exists(os.path.join(basepath, '.devtoolbase')): 246 if os.path.exists(os.path.join(basepath, '.devtoolbase')):
249 context.fixed_setup = True 247 is_fixed_setup = True
250 else: 248 else:
251 pth = basepath 249 pth = basepath
252 while pth != '' and pth != os.sep: 250 while pth != '' and pth != os.sep:
253 if os.path.exists(os.path.join(pth, '.devtoolbase')): 251 if os.path.exists(os.path.join(pth, '.devtoolbase')):
254 context.fixed_setup = True 252 is_fixed_setup = True
255 basepath = pth 253 basepath = pth
256 break 254 break
257 pth = os.path.dirname(pth) 255 pth = os.path.dirname(pth)
258 256
259 if not context.fixed_setup: 257 if not is_fixed_setup:
260 basepath = os.environ.get('BUILDDIR') 258 basepath = os.environ.get('BUILDDIR')
261 if not basepath: 259 if not basepath:
262 logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)") 260 logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
@@ -264,10 +262,9 @@ def main():
264 262
265 logger.debug('Using basepath %s' % basepath) 263 logger.debug('Using basepath %s' % basepath)
266 264
267 config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf')) 265 config = ConfigHandler(basepath, os.path.join(basepath, 'conf', 'devtool.conf'))
268 if not config.read(): 266 if not config.read():
269 return -1 267 return -1
270 context.config = config
271 268
272 bitbake_subdir = config.get('General', 'bitbake_subdir', '') 269 bitbake_subdir = config.get('General', 'bitbake_subdir', '')
273 if bitbake_subdir: 270 if bitbake_subdir:
@@ -289,6 +286,7 @@ def main():
289 scriptutils.logger_setup_color(logger, global_args.color) 286 scriptutils.logger_setup_color(logger, global_args.color)
290 287
291 if global_args.bbpath is None: 288 if global_args.bbpath is None:
289 import bb
292 try: 290 try:
293 tinfoil = setup_tinfoil(config_only=True, basepath=basepath) 291 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
294 try: 292 try:
@@ -300,7 +298,10 @@ def main():
300 298
301 # Search BBPATH first to allow layers to override plugins in scripts_path 299 # Search BBPATH first to allow layers to override plugins in scripts_path
302 pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]] 300 pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]]
303 context.pluginpaths = pluginpaths 301
302 context = Context(fixed_setup=is_fixed_setup, config=config, pluginpaths=pluginpaths)
303
304 plugins = []
304 for pluginpath in pluginpaths: 305 for pluginpath in pluginpaths:
305 scriptutils.load_plugins(logger, plugins, pluginpath) 306 scriptutils.load_plugins(logger, plugins, pluginpath)
306 307
@@ -331,9 +332,9 @@ def main():
331 args = parser.parse_args(unparsed_args, namespace=global_args) 332 args = parser.parse_args(unparsed_args, namespace=global_args)
332 333
333 try: 334 try:
335 workspace = {}
334 if not getattr(args, 'no_workspace', False): 336 if not getattr(args, 'no_workspace', False):
335 read_workspace() 337 workspace = read_workspace(basepath, context)
336
337 ret = args.func(args, config, basepath, workspace) 338 ret = args.func(args, config, basepath, workspace)
338 except DevtoolError as err: 339 except DevtoolError as err:
339 if str(err): 340 if str(err):
@@ -341,6 +342,7 @@ def main():
341 ret = err.exitcode 342 ret = err.exitcode
342 except argparse_oe.ArgumentUsageError as ae: 343 except argparse_oe.ArgumentUsageError as ae:
343 parser.error_subcommand(ae.message, ae.subcommand) 344 parser.error_subcommand(ae.message, ae.subcommand)
345 ret = 2
344 346
345 return ret 347 return ret
346 348
diff --git a/scripts/gen-site-config b/scripts/gen-site-config
deleted file mode 100755
index 727b809c0f..0000000000
--- a/scripts/gen-site-config
+++ /dev/null
@@ -1,43 +0,0 @@
1#! /bin/sh
2# Copyright (c) 2005-2008 Wind River Systems, Inc.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7cat << EOF
8AC_PREREQ(2.57)
9AC_INIT([site_wide],[1.0.0])
10
11EOF
12
13# Disable as endian is set in the default config
14#echo AC_C_BIGENDIAN
15#echo
16
17if [ -e $1/types ] ; then
18 while read type ; do
19 echo "AC_CHECK_SIZEOF([$type])"
20 done < $1/types
21
22 echo
23fi
24
25if [ -e $1/funcs ]; then
26 while read func ; do
27 echo "AC_CHECK_FUNCS([$func])"
28 done < $1/funcs
29
30 echo
31fi
32
33if [ -e $1/headers ]; then
34 while read header ; do
35 echo "AC_CHECK_HEADERS([$header])"
36 done < $1/headers
37
38 echo
39fi
40
41cat << EOF
42AC_OUTPUT
43EOF
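
The deleted gen-site-config streamed an autoconf input file to stdout, emitting one check per entry in the optional types/funcs/headers list files under the directory given as $1. A compact Python reconstruction of its output logic (for reference only; this helper is not part of the tree):

def gen_site_config(types=(), funcs=(), headers=()):
    lines = ["AC_PREREQ(2.57)", "AC_INIT([site_wide],[1.0.0])", ""]
    lines += ["AC_CHECK_SIZEOF([%s])" % t for t in types]
    lines += ["AC_CHECK_FUNCS([%s])" % f for f in funcs]
    lines += ["AC_CHECK_HEADERS([%s])" % h for h in headers]
    lines.append("AC_OUTPUT")
    return "\n".join(lines)

print(gen_site_config(types=["long", "int"]))
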
diff --git a/scripts/install-buildtools b/scripts/install-buildtools
index 2218f3ffac..aa23942858 100755
--- a/scripts/install-buildtools
+++ b/scripts/install-buildtools
@@ -56,9 +56,9 @@ PROGNAME = 'install-buildtools'
56logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) 56logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
57 57
58DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') 58DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
59DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto' 59DEFAULT_BASE_URL = 'https://downloads.yoctoproject.org/releases/yocto'
60DEFAULT_RELEASE = 'yocto-4.1' 60DEFAULT_RELEASE = 'yocto-5.2.1'
61DEFAULT_INSTALLER_VERSION = '4.1' 61DEFAULT_INSTALLER_VERSION = '5.2.1'
62DEFAULT_BUILDDATE = '202110XX' 62DEFAULT_BUILDDATE = '202110XX'
63 63
64# Python version sanity check 64# Python version sanity check
@@ -102,6 +102,16 @@ def sha256_file(filename):
102 import hashlib 102 import hashlib
103 return _hasher(hashlib.sha256(), filename) 103 return _hasher(hashlib.sha256(), filename)
104 104
105def remove_quotes(var):
106 """
107 If a variable starts and ends with double quotes, remove them.
108 Assumption: if a variable starts with double quotes, it must also
109 end with them.
110 """
111 if var[0] == '"':
112 var = var[1:-1]
113 return var
114
105 115
106def main(): 116def main():
107 global DEFAULT_INSTALL_DIR 117 global DEFAULT_INSTALL_DIR
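
remove_quotes() strips a symmetric pair of double quotes and, per its docstring, assumes a leading quote implies a trailing one. A quick sketch of the intended behaviour (the values are hypothetical; in this script it is fed regex-matched assignments from the SDK's environment-setup file):

assert remove_quotes('"/opt/buildtools/sysroots"') == '/opt/buildtools/sysroots'
assert remove_quotes('/opt/buildtools/sysroots') == '/opt/buildtools/sysroots'
# Note: var[0] would raise IndexError for an empty string, so callers
# should only pass non-empty values.
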
@@ -117,7 +127,8 @@ def main():
117 127
118 parser = argparse.ArgumentParser( 128 parser = argparse.ArgumentParser(
119 description="Buildtools installation helper", 129 description="Buildtools installation helper",
120 add_help=False) 130 add_help=False,
131 formatter_class=argparse.RawTextHelpFormatter)
121 parser.add_argument('-u', '--url', 132 parser.add_argument('-u', '--url',
122 help='URL from where to fetch buildtools SDK installer, not ' 133 help='URL from where to fetch buildtools SDK installer, not '
123 'including filename (optional)\n' 134 'including filename (optional)\n'
@@ -131,6 +142,9 @@ def main():
131 default=DEFAULT_INSTALL_DIR, 142 default=DEFAULT_INSTALL_DIR,
132 help='directory where buildtools SDK will be installed (optional)', 143 help='directory where buildtools SDK will be installed (optional)',
133 action='store') 144 action='store')
145 parser.add_argument('--downloads-directory',
146 help='use this directory for tarball/checksum downloads and do not erase them (default is a temporary directory which is deleted after unpacking and installing the buildtools)',
147 action='store')
134 parser.add_argument('-r', '--release', 148 parser.add_argument('-r', '--release',
135 default=DEFAULT_RELEASE, 149 default=DEFAULT_RELEASE,
136 help='Yocto Project release string for SDK which will be ' 150 help='Yocto Project release string for SDK which will be '
@@ -224,11 +238,14 @@ def main():
224 safe_filename = quote(filename) 238 safe_filename = quote(filename)
225 buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename) 239 buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename)
226 240
227 tmpsdk_dir = tempfile.mkdtemp() 241 sdk_dir = args.downloads_directory or tempfile.mkdtemp()
242 os.makedirs(sdk_dir, exist_ok=True)
228 try: 243 try:
229 # Fetch installer 244 # Fetch installer
230 logger.info("Fetching buildtools installer") 245 logger.info("Fetching buildtools installer")
231 tmpbuildtools = os.path.join(tmpsdk_dir, filename) 246 tmpbuildtools = os.path.join(sdk_dir, filename)
247 with open(os.path.join(sdk_dir, 'buildtools_url'), 'w') as f:
248 f.write(buildtools_url)
232 ret = subprocess.call("wget -q -O %s %s" % 249 ret = subprocess.call("wget -q -O %s %s" %
233 (tmpbuildtools, buildtools_url), shell=True) 250 (tmpbuildtools, buildtools_url), shell=True)
234 if ret != 0: 251 if ret != 0:
@@ -238,19 +255,17 @@ def main():
238 # Verify checksum 255 # Verify checksum
239 if args.check: 256 if args.check:
240 logger.info("Fetching buildtools installer checksum") 257 logger.info("Fetching buildtools installer checksum")
241 checksum_type = "" 258 checksum_type = "sha256sum"
242 for checksum_type in ["md5sum", "sha256sum"]: 259 checksum_url = "{}.{}".format(buildtools_url, checksum_type)
243 check_url = "{}.{}".format(buildtools_url, checksum_type) 260 checksum_filename = "{}.{}".format(filename, checksum_type)
244 checksum_filename = "{}.{}".format(filename, checksum_type) 261 tmpbuildtools_checksum = os.path.join(sdk_dir, checksum_filename)
245 tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename) 262 with open(os.path.join(sdk_dir, 'checksum_url'), 'w') as f:
246 ret = subprocess.call("wget -q -O %s %s" % 263 f.write(checksum_url)
247 (tmpbuildtools_checksum, check_url), shell=True) 264 ret = subprocess.call("wget -q -O %s %s" %
248 if ret == 0: 265 (tmpbuildtools_checksum, checksum_url), shell=True)
249 break 266 if ret != 0:
250 else: 267 logger.error("Could not download file from %s" % checksum_url)
251 if ret != 0: 268 return ret
252 logger.error("Could not download file from %s" % check_url)
253 return ret
254 regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$") 269 regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
255 with open(tmpbuildtools_checksum, 'rb') as f: 270 with open(tmpbuildtools_checksum, 'rb') as f:
256 original = f.read() 271 original = f.read()
@@ -263,10 +278,7 @@ def main():
263 logger.error("Filename does not match name in checksum") 278 logger.error("Filename does not match name in checksum")
264 return 1 279 return 1
265 checksum = m.group('checksum') 280 checksum = m.group('checksum')
266 if checksum_type == "md5sum": 281 checksum_value = sha256_file(tmpbuildtools)
267 checksum_value = md5_file(tmpbuildtools)
268 else:
269 checksum_value = sha256_file(tmpbuildtools)
270 if checksum == checksum_value: 282 if checksum == checksum_value:
271 logger.info("Checksum success") 283 logger.info("Checksum success")
272 else: 284 else:
@@ -280,7 +292,7 @@ def main():
280 os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC) 292 os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC)
281 logger.debug(os.stat(tmpbuildtools)) 293 logger.debug(os.stat(tmpbuildtools))
282 if args.directory: 294 if args.directory:
283 install_dir = args.directory 295 install_dir = os.path.abspath(args.directory)
284 ret = subprocess.call("%s -d %s -y" % 296 ret = subprocess.call("%s -d %s -y" %
285 (tmpbuildtools, install_dir), shell=True) 297 (tmpbuildtools, install_dir), shell=True)
286 else: 298 else:
@@ -301,7 +313,7 @@ def main():
301 if match: 313 if match:
302 env_var = match.group('env_var') 314 env_var = match.group('env_var')
303 logger.debug("env_var: %s" % env_var) 315 logger.debug("env_var: %s" % env_var)
304 env_val = match.group('env_val') 316 env_val = remove_quotes(match.group('env_val'))
305 logger.debug("env_val: %s" % env_val) 317 logger.debug("env_val: %s" % env_val)
306 os.environ[env_var] = env_val 318 os.environ[env_var] = env_val
307 319
@@ -343,7 +355,8 @@ def main():
343 355
344 finally: 356 finally:
345 # cleanup tmp directory 357 # cleanup tmp directory
346 shutil.rmtree(tmpsdk_dir) 358 if not args.downloads_directory:
359 shutil.rmtree(sdk_dir)
347 360
348 361
349if __name__ == '__main__': 362if __name__ == '__main__':
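
With md5 support dropped, verification reduces to fetching <installer>.sha256sum, parsing its single '<hash>  <filename>' line with the same regex the script uses, and comparing against a locally computed digest. A self-contained sketch of that flow (paths are hypothetical):

import hashlib
import re

CHECKSUM_RE = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")

def verify_sha256(installer_path, checksum_path):
    """Return True if the installer matches its .sha256sum file."""
    with open(checksum_path) as f:
        m = CHECKSUM_RE.match(f.read().strip())
    if not m:
        raise ValueError("unexpected checksum file format")
    h = hashlib.sha256()
    with open(installer_path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return m.group('checksum') == h.hexdigest()
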
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html
index ad4a93ed02..86435273cf 100644
--- a/scripts/lib/build_perf/html/measurement_chart.html
+++ b/scripts/lib/build_perf/html/measurement_chart.html
@@ -2,7 +2,7 @@
2 // Get raw data 2 // Get raw data
3 const rawData = [ 3 const rawData = [
4 {% for sample in measurement.samples %} 4 {% for sample in measurement.samples %}
5 [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}], 5 [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'],
6 {% endfor %} 6 {% endfor %}
7 ]; 7 ];
8 8
@@ -26,27 +26,37 @@
26 ] 26 ]
27 }); 27 });
28 28
29 const commitCountList = rawData.map(([commit, value, time]) => {
30 return commit
31 });
32
33 const commitCountData = rawData.map(([commit, value, time]) => {
34 return updateValue(value)
35 });
36
29 // Set chart options 37 // Set chart options
30 const option = { 38 const option_start_time = {
31 tooltip: { 39 tooltip: {
32 trigger: 'axis', 40 trigger: 'axis',
33 valueFormatter: (value) => { 41 enterable: true,
34 const commitNumber = rawData.filter(([commit, dataValue, time]) => updateValue(dataValue) === value) 42 position: function (point, params, dom, rect, size) {
43 return [point[0], '0%'];
44 },
45 formatter: function (param) {
46 const value = param[0].value[1]
47 const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
48 const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
49
50 // Add commit hash to the tooltip as a link
51 const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
35 if ('{{ measurement.value_type.quantity }}' == 'time') { 52 if ('{{ measurement.value_type.quantity }}' == 'time') {
36 const hours = Math.floor(value/60) 53 const hours = Math.floor(value/60)
37 const minutes = Math.floor(value % 60) 54 const minutes = Math.floor(value % 60)
38 const seconds = Math.floor((value * 60) % 60) 55 const seconds = Math.floor((value * 60) % 60)
39 return [ 56 return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
40 hours + ':' + minutes + ':' + seconds + ', ' +
41 'commit number: ' + commitNumber[0][0]
42 ]
43 } 57 }
44 return [ 58 return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
45 value.toFixed(2) + ' MB' + ', ' + 59 ;}
46 'commit number: ' + commitNumber[0][0]
47 ]
48 },
49
50 }, 60 },
51 xAxis: { 61 xAxis: {
52 type: 'time', 62 type: 'time',
@@ -72,29 +82,87 @@
72 { 82 {
73 name: '{{ measurement.value_type.quantity }}', 83 name: '{{ measurement.value_type.quantity }}',
74 type: 'line', 84 type: 'line',
75 step: 'start',
76 symbol: 'none', 85 symbol: 'none',
77 data: data 86 data: data
78 } 87 }
79 ] 88 ]
80 }; 89 };
81 90
91 const option_commit_count = {
92 tooltip: {
93 trigger: 'axis',
94 enterable: true,
95 position: function (point, params, dom, rect, size) {
96 return [point[0], '0%'];
97 },
98 formatter: function (param) {
99 const value = param[0].value
100 const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
101 const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
102 // Add commit hash to the tooltip as a link
103 const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
104 if ('{{ measurement.value_type.quantity }}' == 'time') {
105 const hours = Math.floor(value/60)
106 const minutes = Math.floor(value % 60)
107 const seconds = Math.floor((value * 60) % 60)
108 return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
109 }
110 return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
111 ;}
112 },
113 xAxis: {
114 name: 'Commit count',
115 type: 'category',
116 data: commitCountList
117 },
118 yAxis: {
119 name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
120 type: 'value',
121 min: function(value) {
122 return Math.round(value.min - 0.5);
123 },
124 max: function(value) {
125 return Math.round(value.max + 0.5);
126 }
127 },
128 dataZoom: [
129 {
130 type: 'slider',
131 xAxisIndex: 0,
132 filterMode: 'none'
133 },
134 ],
135 series: [
136 {
137 name: '{{ measurement.value_type.quantity }}',
138 type: 'line',
139 symbol: 'none',
140 data: commitCountData
141 }
142 ]
143 };
144
82 // Draw chart 145 // Draw chart
83 const chart_div = document.getElementById('{{ chart_elem_id }}'); 146 const draw_chart = (chart_id, option) => {
84 // Set dark mode 147 let chart_name
85 let measurement_chart 148 const chart_div = document.getElementById(chart_id);
86 if (window.matchMedia('(prefers-color-scheme: dark)').matches) { 149 // Set dark mode
87 measurement_chart= echarts.init(chart_div, 'dark', { 150 if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
88 height: 320 151 chart_name= echarts.init(chart_div, 'dark', {
89 }); 152 height: 320
90 } else { 153 });
91 measurement_chart= echarts.init(chart_div, null, { 154 } else {
92 height: 320 155 chart_name= echarts.init(chart_div, null, {
156 height: 320
157 });
158 }
159 // Change chart size with browser resize
160 window.addEventListener('resize', function() {
161 chart_name.resize();
93 }); 162 });
163 return chart_name.setOption(option);
94 } 164 }
95 // Change chart size with browser resize 165
96 window.addEventListener('resize', function() { 166 draw_chart('{{ chart_elem_start_time_id }}', option_start_time)
97 measurement_chart.resize(); 167 draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count)
98 });
99 measurement_chart.setOption(option);
100</script> 168</script>
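
Both tooltip formatters above treat the measured value as a fractional minute count and split it into hours, minutes and seconds; note the timezone label in the date string is hardcoded to '(CEST)'. The duration arithmetic in Python, for reference (the 90.5 input is a made-up sample):

def format_duration(minutes):
    # Mirrors the template's Math.floor() arithmetic.
    hours = int(minutes // 60)
    mins = int(minutes % 60)
    secs = int((minutes * 60) % 60)
    return '%d:%d:%d' % (hours, mins, secs)

assert format_duration(90.5) == '1:30:30'
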
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html
index 537ed3ee52..28cd80e738 100644
--- a/scripts/lib/build_perf/html/report.html
+++ b/scripts/lib/build_perf/html/report.html
@@ -9,7 +9,8 @@
9{% for test in test_data %} 9{% for test in test_data %}
10 {% if test.status == 'SUCCESS' %} 10 {% if test.status == 'SUCCESS' %}
11 {% for measurement in test.measurements %} 11 {% for measurement in test.measurements %}
12 {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %} 12 {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %}
13 {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %}
13 {% include 'measurement_chart.html' %} 14 {% include 'measurement_chart.html' %}
14 {% endfor %} 15 {% endfor %}
15 {% endif %} 16 {% endif %}
@@ -116,6 +117,22 @@ a {
116a:hover { 117a:hover {
117 color: #8080ff; 118 color: #8080ff;
118} 119}
120button {
121 background-color: #F3F4F6;
122 border: none;
123 outline: none;
124 cursor: pointer;
125 padding: 10px 12px;
126 transition: 0.3s;
127 border-radius: 8px;
128 color: #3A4353;
129}
130button:hover {
131 background-color: #d6d9e0;
132}
133.tab button.active {
134 background-color: #d6d9e0;
135}
119@media (prefers-color-scheme: dark) { 136@media (prefers-color-scheme: dark) {
120 :root { 137 :root {
121 --text: #e9e8fa; 138 --text: #e9e8fa;
@@ -126,6 +143,16 @@ a:hover {
126 --trborder: #212936; 143 --trborder: #212936;
127 --chartborder: #b1b0bf; 144 --chartborder: #b1b0bf;
128 } 145 }
146 button {
147 background-color: #28303E;
148 color: #fff;
149 }
150 button:hover {
151 background-color: #545a69;
152 }
153 .tab button.active {
154 background-color: #545a69;
155 }
129} 156}
130</style> 157</style>
131 158
@@ -233,7 +260,18 @@ a:hover {
233 <tr> 260 <tr>
234 <td style="width: 75%"> 261 <td style="width: 75%">
235 {# Linechart #} 262 {# Linechart #}
236 <div id="{{ test.name }}_{{ measurement.name }}_chart"></div> 263 <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks">
264 <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button>
265 <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button>
266 </div>
267 <div class="{{ test.name }}_{{ measurement.name }}_tabcontent">
268 <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;">
269 <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div>
270 </div>
271 <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;">
272 <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div>
273 </div>
274 </div>
237 </td> 275 </td>
238 <td> 276 <td>
239 {# Measurement statistics #} 277 {# Measurement statistics #}
@@ -340,6 +378,31 @@ a:hover {
340 <div class="preformatted">{{ test.message }}</div> 378 <div class="preformatted">{{ test.message }}</div>
341 {% endif %} 379 {% endif %}
342 {% endfor %} 380 {% endfor %}
343</div></body> 381</div>
344</html>
345 382
383<script>
384function openChart(event, chartType, chartName) {
385 let i, tabcontents, tablinks
386 tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`);
387 tabcontents.forEach((tabcontent) => {
388 tabcontent.style.display = "none";
389 });
390
391 tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`);
392 tablinks.forEach((tabLink) => {
393 tabLink.classList.remove('active');
394 });
395
396 const targetTab = document.getElementById(chartType)
397 targetTab.style.display = "block";
398
399 // Call resize on the ECharts instance to redraw the chart
400 const chartContainer = targetTab.querySelector('div')
401 echarts.init(chartContainer).resize();
402
403 event.currentTarget.classList.add('active');
404}
405</script>
406
407</body>
408</html>
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index 62ecdfe390..86aadf39a6 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -452,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs):
452 msg.extend([' ' + line for line in output.splitlines()]) 452 msg.extend([' ' + line for line in output.splitlines()])
453 msg.append('') 453 msg.append('')
454 return '\n'.join(msg) 454 return '\n'.join(msg)
455
456
457def get_git_toplevel(directory):
458 """
459 Try and find the top of the git repository that directory might be in.
460 Returns the top-level directory, or None.
461 """
462 cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"]
463 try:
464 return subprocess.check_output(cmd, text=True).strip()
465 except:
466 return None
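
get_git_toplevel() deliberately swallows every failure and returns None. A usage sketch with a narrower except clause, shown as an alternative rather than what the patch does:

import subprocess

def get_git_toplevel_strict(directory):
    cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"]
    try:
        return subprocess.check_output(cmd, text=True,
                                       stderr=subprocess.DEVNULL).strip()
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Not inside a git repository, or git is not installed.
        return None
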
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index 97b16f78c8..ddead69a7b 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -7,7 +7,7 @@ import glob
7import os 7import os
8import unittest 8import unittest
9import re 9import re
10from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures 10from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel
11from checklayer.case import OECheckLayerTestCase 11from checklayer.case import OECheckLayerTestCase
12 12
13class CommonCheckLayer(OECheckLayerTestCase): 13class CommonCheckLayer(OECheckLayerTestCase):
@@ -40,6 +40,38 @@ class CommonCheckLayer(OECheckLayerTestCase):
40 email_regex = re.compile(r"[^@]+@[^@]+") 40 email_regex = re.compile(r"[^@]+@[^@]+")
41 self.assertTrue(email_regex.match(data)) 41 self.assertTrue(email_regex.match(data))
42 42
43 def find_file_by_name(self, globs):
44 """
45 Utility function to find a file that matches the specified list of
46 globs, in either the layer directory itself or the repository top-level
47 directory.
48 """
49 directories = [self.tc.layer["path"]]
50 toplevel = get_git_toplevel(directories[0])
51 if toplevel:
52 directories.append(toplevel)
53
54 for path in directories:
55 for name in globs:
56 files = glob.glob(os.path.join(path, name))
57 if files:
58 return sorted(files)[0]
59 return None
60
61 def test_security(self):
62 """
63 Test that the layer has a SECURITY.md (or similar) file, either in the
64 layer itself or at the top of the containing git repository.
65 """
66 if self.tc.layer["type"] == LayerType.CORE:
67 raise unittest.SkipTest("Core layer's SECURITY is top level")
68
69 filename = self.find_file_by_name(("SECURITY", "SECURITY.*"))
70 self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.")
71
72 size = os.path.getsize(filename)
73 self.assertGreater(size, 0, msg=f"{filename} has no content.")
74
43 def test_parse(self): 75 def test_parse(self):
44 check_command('Layer %s failed to parse.' % self.tc.layer['name'], 76 check_command('Layer %s failed to parse.' % self.tc.layer['name'],
45 'bitbake -p') 77 'bitbake -p')
@@ -72,7 +104,6 @@ class CommonCheckLayer(OECheckLayerTestCase):
72 self.tc.layer['name']) 104 self.tc.layer['name'])
73 self.fail('\n'.join(msg)) 105 self.fail('\n'.join(msg))
74 106
75 @unittest.expectedFailure
76 def test_patches_upstream_status(self): 107 def test_patches_upstream_status(self):
77 import sys 108 import sys
78 sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/')) 109 sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 6133c1c5b4..fa6e1a34fd 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -234,7 +234,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
234 f.write(line) 234 f.write(line)
235 235
236 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) 236 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
237 bb.process.run('git tag -f %s' % basetag, cwd=repodir) 237 bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir)
238 238
239 # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, 239 # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
240 # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe 240 # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
@@ -256,7 +256,7 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
256 oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) 256 oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
257 found = False 257 found = False
258 if os.path.exists(os.path.join(repodir, '.gitmodules')): 258 if os.path.exists(os.path.join(repodir, '.gitmodules')):
259 bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir) 259 bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir)
260 260
261def recipe_to_append(recipefile, config, wildcard=False): 261def recipe_to_append(recipefile, config, wildcard=False):
262 """ 262 """
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
index 935ffab46c..0b2c3d33dc 100644
--- a/scripts/lib/devtool/build.py
+++ b/scripts/lib/devtool/build.py
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace):
49 rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) 49 rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
50 if not rd: 50 if not rd:
51 return 1 51 return 1
52 deploytask = 'do_deploy' in rd.getVar('__BBTASKS') 52 deploytask = 'do_deploy' in bb.build.listtasks(rd)
53 finally: 53 finally:
54 tinfoil.shutdown() 54 tinfoil.shutdown()
55 55
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 1cd4831d2b..990303982c 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -5,14 +5,7 @@
5# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
6# 6#
7 7
8import os
9import subprocess
10import logging 8import logging
11import glob
12import shutil
13import errno
14import sys
15import tempfile
16from devtool import DevtoolError 9from devtool import DevtoolError
17from devtool import build_image 10from devtool import build_image
18 11
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
index a62b93224e..ee5bb57265 100644
--- a/scripts/lib/devtool/ide_plugins/ide_code.py
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -161,7 +161,6 @@ class IdeVSCode(IdeBase):
161 if modified_recipe.build_tool is not BuildTool.CMAKE: 161 if modified_recipe.build_tool is not BuildTool.CMAKE:
162 return 162 return
163 recommendations += [ 163 recommendations += [
164 "twxs.cmake",
165 "ms-vscode.cmake-tools", 164 "ms-vscode.cmake-tools",
166 "ms-vscode.cpptools", 165 "ms-vscode.cpptools",
167 "ms-vscode.cpptools-extension-pack", 166 "ms-vscode.cpptools-extension-pack",
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
index 65873b088d..931408fa74 100755
--- a/scripts/lib/devtool/ide_sdk.py
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -167,7 +167,7 @@ class RecipeImage:
167 self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') 167 self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
168 168
169 self.gdbserver_missing = 'gdbserver' not in image_d.getVar( 169 self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
170 'IMAGE_INSTALL') 170 'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES')
171 171
172 @property 172 @property
173 def debug_support(self): 173 def debug_support(self):
@@ -288,6 +288,7 @@ class RecipeModified:
288 self.bblayers = None 288 self.bblayers = None
289 self.bpn = None 289 self.bpn = None
290 self.d = None 290 self.d = None
291 self.debug_build = None
291 self.fakerootcmd = None 292 self.fakerootcmd = None
292 self.fakerootenv = None 293 self.fakerootenv = None
293 self.libdir = None 294 self.libdir = None
@@ -333,7 +334,7 @@ class RecipeModified:
333 self.srctree = workspace[workspacepn]['srctree'] 334 self.srctree = workspace[workspacepn]['srctree']
334 # Need to grab this here in case the source is within a subdirectory 335 # Need to grab this here in case the source is within a subdirectory
335 self.real_srctree = get_real_srctree( 336 self.real_srctree = get_real_srctree(
336 self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR')) 337 self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR'))
337 self.bbappend = workspace[workspacepn]['bbappend'] 338 self.bbappend = workspace[workspacepn]['bbappend']
338 339
339 self.ide_sdk_dir = os.path.join( 340 self.ide_sdk_dir = os.path.join(
@@ -348,6 +349,7 @@ class RecipeModified:
348 self.bpn = recipe_d.getVar('BPN') 349 self.bpn = recipe_d.getVar('BPN')
349 self.cxx = recipe_d.getVar('CXX') 350 self.cxx = recipe_d.getVar('CXX')
350 self.d = recipe_d.getVar('D') 351 self.d = recipe_d.getVar('D')
352 self.debug_build = recipe_d.getVar('DEBUG_BUILD')
351 self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') 353 self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
352 self.fakerootenv = recipe_d.getVar('FAKEROOTENV') 354 self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
353 self.libdir = recipe_d.getVar('libdir') 355 self.libdir = recipe_d.getVar('libdir')
@@ -389,17 +391,6 @@ class RecipeModified:
389 self.recipe_id = self.bpn + "-" + self.package_arch 391 self.recipe_id = self.bpn + "-" + self.package_arch
390 self.recipe_id_pretty = self.bpn + ": " + self.package_arch 392 self.recipe_id_pretty = self.bpn + ": " + self.package_arch
391 393
392 def append_to_bbappend(self, append_text):
393 with open(self.bbappend, 'a') as bbap:
394 bbap.write(append_text)
395
396 def remove_from_bbappend(self, append_text):
397 with open(self.bbappend, 'r') as bbap:
398 text = bbap.read()
399 new_text = text.replace(append_text, '')
400 with open(self.bbappend, 'w') as bbap:
401 bbap.write(new_text)
402
403 @staticmethod 394 @staticmethod
404 def is_valid_shell_variable(var): 395 def is_valid_shell_variable(var):
405 """Skip strange shell variables like systemd 396 """Skip strange shell variables like systemd
@@ -412,34 +403,6 @@ class RecipeModified:
412 return True 403 return True
413 return False 404 return False
414 405
415 def debug_build_config(self, args):
416 """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
417 if self.build_tool is BuildTool.CMAKE:
418 append_text = os.linesep + \
419 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
420 if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
421 self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
422 "type": "STRING",
423 "value": "Debug",
424 }
425 self.append_to_bbappend(append_text)
426 elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
427 del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
428 self.remove_from_bbappend(append_text)
429 elif self.build_tool is BuildTool.MESON:
430 append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
431 if args.debug_build_config and self.meson_buildtype != "debug":
432 self.mesonopts.replace(
433 '--buildtype ' + self.meson_buildtype, '--buildtype debug')
434 self.append_to_bbappend(append_text)
435 elif self.meson_buildtype == "debug":
436 self.mesonopts.replace(
437 '--buildtype debug', '--buildtype plain')
438 self.remove_from_bbappend(append_text)
439 elif args.debug_build_config:
440 logger.warn(
441 "--debug-build-config is not implemented for this build tool yet.")
442
443 def solib_search_path(self, image): 406 def solib_search_path(self, image):
444 """Search for debug symbols in the rootfs and rootfs-dbg 407 """Search for debug symbols in the rootfs and rootfs-dbg
445 408
@@ -493,7 +456,7 @@ class RecipeModified:
493 456
494 vars = (key for key in d.keys() if not key.startswith( 457 vars = (key for key in d.keys() if not key.startswith(
495 "__") and not d.getVarFlag(key, "func", False)) 458 "__") and not d.getVarFlag(key, "func", False))
496 for var in vars: 459 for var in sorted(vars):
497 func = d.getVarFlag(var, "func", False) 460 func = d.getVarFlag(var, "func", False)
498 if d.getVarFlag(var, 'python', False) and func: 461 if d.getVarFlag(var, 'python', False) and func:
499 continue 462 continue
@@ -545,7 +508,7 @@ class RecipeModified:
545 cache_vars = {} 508 cache_vars = {}
546 oecmake_args = d.getVar('OECMAKE_ARGS').split() 509 oecmake_args = d.getVar('OECMAKE_ARGS').split()
547 extra_oecmake = d.getVar('EXTRA_OECMAKE').split() 510 extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
548 for param in oecmake_args + extra_oecmake: 511 for param in sorted(oecmake_args + extra_oecmake):
549 d_pref = "-D" 512 d_pref = "-D"
550 if param.startswith(d_pref): 513 if param.startswith(d_pref):
551 param = param[len(d_pref):] 514 param = param[len(d_pref):]
@@ -712,42 +675,6 @@ class RecipeModified:
712 binaries.append(abs_name[d_len:]) 675 binaries.append(abs_name[d_len:])
713 return sorted(binaries) 676 return sorted(binaries)
714 677
715 def gen_delete_package_dirs(self):
716 """delete folders of package tasks
717
718 This is a workaround for an issue with recipes having their sources
719 downloaded as file://
720 This likely breaks pseudo like:
721 path mismatch [3 links]: ino 79147802 db
722 .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
723 cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
724 .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
725 Since the files are outdated anyway, let's delete them (also from pseudo's db) to work around this issue.
726 """
727 cmd_lines = ['#!/bin/sh']
728
729 # Set up the appropriate environment
730 newenv = dict(os.environ)
731 for varvalue in self.fakerootenv.split():
732 if '=' in varvalue:
733 splitval = varvalue.split('=', 1)
734 newenv[splitval[0]] = splitval[1]
735
736 # Replicate the environment variables from bitbake
737 for var, val in newenv.items():
738 if not RecipeModified.is_valid_shell_variable(var):
739 continue
740 cmd_lines.append('%s="%s"' % (var, val))
741 cmd_lines.append('export %s' % var)
742
743 # Delete the folders
744 pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
745 "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
746 cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
747 cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
748
749 return self.write_script(cmd_lines, 'delete_package_dirs')
750
751 def gen_deploy_target_script(self, args): 678 def gen_deploy_target_script(self, args):
752 """Generate a script which does what devtool deploy-target does 679 """Generate a script which does what devtool deploy-target does
753 680
@@ -785,8 +712,6 @@ class RecipeModified:
785 """Generate a script which does install and deploy""" 712 """Generate a script which does install and deploy"""
786 cmd_lines = ['#!/bin/bash'] 713 cmd_lines = ['#!/bin/bash']
787 714
788 cmd_lines.append(self.gen_delete_package_dirs())
789
790 # . oe-init-build-env $BUILDDIR 715 # . oe-init-build-env $BUILDDIR
791 # Note: Sourcing scripts with arguments requires bash 716 # Note: Sourcing scripts with arguments requires bash
792 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( 717 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
@@ -988,6 +913,13 @@ def ide_setup(args, config, basepath, workspace):
988 recipe_modified.gen_meson_wrapper() 913 recipe_modified.gen_meson_wrapper()
989 ide.setup_modified_recipe( 914 ide.setup_modified_recipe(
990 args, recipe_image, recipe_modified) 915 args, recipe_image, recipe_modified)
916
917 if recipe_modified.debug_build != '1':
918 logger.warn(
919 'Recipe %s is compiled with release build configuration. '
920 'You might want to add DEBUG_BUILD = "1" to %s. '
921 'Note that devtool modify --debug-build can do this automatically.',
922 recipe_modified.name, recipe_modified.bbappend)
991 else: 923 else:
992 raise DevtoolError("Must not end up here.") 924 raise DevtoolError("Must not end up here.")
993 925
@@ -995,6 +927,15 @@ def ide_setup(args, config, basepath, workspace):
995def register_commands(subparsers, context): 927def register_commands(subparsers, context):
996 """Register devtool subcommands from this plugin""" 928 """Register devtool subcommands from this plugin"""
997 929
930 # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
931 # configuration is generated. In the case of the eSDK, the bootstrapping is performed
932 # during the installation of the eSDK installer. Running the ide-sdk plugin from an
933 # eSDK installer-based setup would require skipping the bootstrapping and probably
934 # taking some other differences into account when generating the IDE configurations.
935 # This would be possible. But it is not implemented.
936 if context.fixed_setup:
937 return
938
998 global ide_plugins 939 global ide_plugins
999 940
1000 # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. 941 # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
@@ -1015,7 +956,7 @@ def register_commands(subparsers, context):
1015 help='Setup the SDK and configure the IDE') 956 help='Setup the SDK and configure the IDE')
1016 parser_ide_sdk.add_argument( 957 parser_ide_sdk.add_argument(
1017 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n' 958 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
1018 'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.') 959 'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
1019 parser_ide_sdk.add_argument( 960 parser_ide_sdk.add_argument(
1020 '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, 961 '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
1021 help='Different SDK types are supported:\n' 962 help='Different SDK types are supported:\n'
@@ -1065,6 +1006,4 @@ def register_commands(subparsers, context):
1065 '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') 1006 '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
1066 parser_ide_sdk.add_argument( 1007 parser_ide_sdk.add_argument(
1067 '--no-check-space', help='Do not check for available space before deploying', action='store_true') 1008 '--no-check-space', help='Do not check for available space before deploying', action='store_true')
1068 parser_ide_sdk.add_argument(
1069 '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
1070 parser_ide_sdk.set_defaults(func=ide_setup) 1009 parser_ide_sdk.set_defaults(func=ide_setup)
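
Several of the ide-sdk loops above now iterate in sorted order (sorted(vars) for the exported variables, sorted(oecmake_args + extra_oecmake) for the CMake arguments), so the generated IDE configuration is reproducible between runs. A simplified sketch of the -D cache-variable parsing that loop performs; the type-suffix handling shown here is an assumption, not the exact upstream code:

def parse_cmake_defs(params):
    """Collect -DNAME[:TYPE]=VALUE arguments into a dict."""
    cache_vars = {}
    for param in sorted(params):
        if not param.startswith("-D"):
            continue
        name, _, value = param[2:].partition("=")
        name, _, vtype = name.partition(":")
        cache_vars[name] = {"type": vtype or "STRING", "value": value}
    return cache_vars

assert parse_cmake_defs(["-DCMAKE_BUILD_TYPE:STRING=Debug"]) == \
    {"CMAKE_BUILD_TYPE": {"type": "STRING", "value": "Debug"}}
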
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
index 18daef30c3..1054960551 100644
--- a/scripts/lib/devtool/menuconfig.py
+++ b/scripts/lib/devtool/menuconfig.py
@@ -23,9 +23,6 @@
23import os 23import os
24import bb 24import bb
25import logging 25import logging
26import argparse
27import re
28import glob
29from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command 26from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
30from devtool import check_workspace_recipe 27from devtool import check_workspace_recipe
31logger = logging.getLogger('devtool') 28logger = logging.getLogger('devtool')
@@ -34,7 +31,6 @@ def menuconfig(args, config, basepath, workspace):
34 """Entry point for the devtool 'menuconfig' subcommand""" 31 """Entry point for the devtool 'menuconfig' subcommand"""
35 32
36 rd = "" 33 rd = ""
37 kconfigpath = ""
38 pn_src = "" 34 pn_src = ""
39 localfilesdir = "" 35 localfilesdir = ""
40 workspace_dir = "" 36 workspace_dir = ""
@@ -51,7 +47,6 @@ def menuconfig(args, config, basepath, workspace):
51 raise DevtoolError("This recipe does not support menuconfig option") 47 raise DevtoolError("This recipe does not support menuconfig option")
52 48
53 workspace_dir = os.path.join(config.workspace_path,'sources') 49 workspace_dir = os.path.join(config.workspace_path,'sources')
54 kconfigpath = rd.getVar('B')
55 pn_src = os.path.join(workspace_dir,pn) 50 pn_src = os.path.join(workspace_dir,pn)
56 51
57 # add check to see if oe_local_files exists or not 52 # add check to see if oe_local_files exists or not
@@ -70,7 +65,7 @@ def menuconfig(args, config, basepath, workspace):
70 logger.info('Launching menuconfig') 65 logger.info('Launching menuconfig')
71 exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) 66 exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
72 fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') 67 fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
73 res = standard._create_kconfig_diff(pn_src,rd,fragment) 68 standard._create_kconfig_diff(pn_src,rd,fragment)
74 69
75 return 0 70 return 0
76 71
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 1d0fe13788..1fd5947c41 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -18,11 +18,13 @@ import argparse_oe
18import scriptutils 18import scriptutils
19import errno 19import errno
20import glob 20import glob
21import filecmp
22from collections import OrderedDict 21from collections import OrderedDict
22
23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError 23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
24from devtool import parse_recipe 24from devtool import parse_recipe
25 25
26import bb.utils
27
26logger = logging.getLogger('devtool') 28logger = logging.getLogger('devtool')
27 29
28override_branch_prefix = 'devtool-override-' 30override_branch_prefix = 'devtool-override-'
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-'
30 32
31def add(args, config, basepath, workspace): 33def add(args, config, basepath, workspace):
32 """Entry point for the devtool 'add' subcommand""" 34 """Entry point for the devtool 'add' subcommand"""
33 import bb 35 import bb.data
36 import bb.process
34 import oe.recipeutils 37 import oe.recipeutils
35 38
36 if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: 39 if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
@@ -206,7 +209,7 @@ def add(args, config, basepath, workspace):
206 for fn in os.listdir(tempdir): 209 for fn in os.listdir(tempdir):
207 shutil.move(os.path.join(tempdir, fn), recipedir) 210 shutil.move(os.path.join(tempdir, fn), recipedir)
208 else: 211 else:
209 raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) 212 raise DevtoolError(f'Failed to create a recipe file for source {source}')
210 attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) 213 attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
211 if os.path.exists(attic_recipe): 214 if os.path.exists(attic_recipe):
212 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) 215 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
@@ -305,6 +308,7 @@ def add(args, config, basepath, workspace):
305 308
306def _check_compatible_recipe(pn, d): 309def _check_compatible_recipe(pn, d):
307 """Check if the recipe is supported by devtool""" 310 """Check if the recipe is supported by devtool"""
311 import bb.data
308 if pn == 'perf': 312 if pn == 'perf':
309 raise DevtoolError("The perf recipe does not actually check out " 313 raise DevtoolError("The perf recipe does not actually check out "
310 "source and thus cannot be supported by this tool", 314 "source and thus cannot be supported by this tool",
@@ -374,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
374 378
375def _git_ls_tree(repodir, treeish='HEAD', recursive=False): 379def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
376 """List contents of a git treeish""" 380 """List contents of a git treeish"""
377 import bb 381 import bb.process
378 cmd = ['git', 'ls-tree', '-z', treeish] 382 cmd = ['git', 'ls-tree', '-z', treeish]
379 if recursive: 383 if recursive:
380 cmd.append('-r') 384 cmd.append('-r')
@@ -389,7 +393,7 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
389 393
390def _git_modified(repodir): 394def _git_modified(repodir):
391 """List the difference between HEAD and the index""" 395 """List the difference between HEAD and the index"""
392 import bb 396 import bb.process
393 cmd = ['git', 'status', '--porcelain'] 397 cmd = ['git', 'status', '--porcelain']
394 out, _ = bb.process.run(cmd, cwd=repodir) 398 out, _ = bb.process.run(cmd, cwd=repodir)
395 ret = [] 399 ret = []
@@ -427,8 +431,6 @@ def _ls_tree(directory):
427 431
428def extract(args, config, basepath, workspace): 432def extract(args, config, basepath, workspace):
429 """Entry point for the devtool 'extract' subcommand""" 433 """Entry point for the devtool 'extract' subcommand"""
430 import bb
431
432 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 434 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
433 if not tinfoil: 435 if not tinfoil:
434 # Error already shown 436 # Error already shown
@@ -451,8 +453,6 @@ def extract(args, config, basepath, workspace):
451 453
452def sync(args, config, basepath, workspace): 454def sync(args, config, basepath, workspace):
453 """Entry point for the devtool 'sync' subcommand""" 455 """Entry point for the devtool 'sync' subcommand"""
454 import bb
455
456 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 456 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
457 if not tinfoil: 457 if not tinfoil:
458 # Error already shown 458 # Error already shown
@@ -475,9 +475,9 @@ def sync(args, config, basepath, workspace):
475 475
476def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): 476def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
477 """Extract sources of a recipe""" 477 """Extract sources of a recipe"""
478 import oe.recipeutils
479 import oe.patch
480 import oe.path 478 import oe.path
479 import bb.data
480 import bb.process
481 481
482 pn = d.getVar('PN') 482 pn = d.getVar('PN')
483 483
@@ -542,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
542 tempbasedir = d.getVar('WORKDIR') 542 tempbasedir = d.getVar('WORKDIR')
543 bb.utils.mkdirhier(tempbasedir) 543 bb.utils.mkdirhier(tempbasedir)
544 tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) 544 tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
545 appendbackup = None
545 try: 546 try:
546 tinfoil.logger.setLevel(logging.WARNING) 547 tinfoil.logger.setLevel(logging.WARNING)
547 548
@@ -552,7 +553,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
552 appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') 553 appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
553 shutil.copyfile(appendfile, appendbackup) 554 shutil.copyfile(appendfile, appendbackup)
554 else: 555 else:
555 appendbackup = None
556 bb.utils.mkdirhier(os.path.dirname(appendfile)) 556 bb.utils.mkdirhier(os.path.dirname(appendfile))
557 logger.debug('writing append file %s' % appendfile) 557 logger.debug('writing append file %s' % appendfile)
558 with open(appendfile, 'a') as f: 558 with open(appendfile, 'a') as f:
@@ -625,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
625 srcsubdir = f.read() 625 srcsubdir = f.read()
626 except FileNotFoundError as e: 626 except FileNotFoundError as e:
627 raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) 627 raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
628 srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) 628 srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR'))))
629 629
630 # Check if work-shared is empty, if yes 630 # Check if work-shared is empty, if yes
631 # find source and copy to work-shared 631 # find source and copy to work-shared
@@ -678,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
678 678
679def _add_md5(config, recipename, filename): 679def _add_md5(config, recipename, filename):
680 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" 680 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
681 import bb.utils
682
683 def addfile(fn): 681 def addfile(fn):
684 md5 = bb.utils.md5_file(fn) 682 md5 = bb.utils.md5_file(fn)
685 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: 683 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
@@ -698,7 +696,6 @@ def _add_md5(config, recipename, filename):
698def _check_preserve(config, recipename): 696def _check_preserve(config, recipename):
699 """Check if a file was manually changed and needs to be saved in 'attic' 697 """Check if a file was manually changed and needs to be saved in 'attic'
700 directory""" 698 directory"""
701 import bb.utils
702 origfile = os.path.join(config.workspace_path, '.devtool_md5') 699 origfile = os.path.join(config.workspace_path, '.devtool_md5')
703 newfile = os.path.join(config.workspace_path, '.devtool_md5_new') 700 newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
704 preservepath = os.path.join(config.workspace_path, 'attic', recipename) 701 preservepath = os.path.join(config.workspace_path, 'attic', recipename)
@@ -729,36 +726,36 @@ def _check_preserve(config, recipename):
729 726
730def get_staging_kver(srcdir): 727def get_staging_kver(srcdir):
731 # Kernel version from work-shared 728 # Kernel version from work-shared
732 kerver = [] 729 import itertools
733 staging_kerVer="" 730 try:
734 if os.path.exists(srcdir) and os.listdir(srcdir):
735 with open(os.path.join(srcdir, "Makefile")) as f: 731 with open(os.path.join(srcdir, "Makefile")) as f:
736 version = [next(f) for x in range(5)][1:4] 732 # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 1, 2, 3
737 for word in version: 733 return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4))
738 kerver.append(word.split('= ')[1].split('\n')[0]) 734 except FileNotFoundError:
739 staging_kerVer = ".".join(kerver) 735 return ""
740 return staging_kerVer
741 736
742def get_staging_kbranch(srcdir): 737def get_staging_kbranch(srcdir):
738 import bb.process
743 staging_kbranch = "" 739 staging_kbranch = ""
744 if os.path.exists(srcdir) and os.listdir(srcdir): 740 if os.path.exists(srcdir) and os.listdir(srcdir):
745 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) 741 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
746 staging_kbranch = "".join(branch.split('\n')[0]) 742 staging_kbranch = "".join(branch.split('\n')[0])
747 return staging_kbranch 743 return staging_kbranch
748 744
749def get_real_srctree(srctree, s, workdir): 745def get_real_srctree(srctree, s, unpackdir):
750 # Check that recipe isn't using a shared workdir 746 # Check that recipe isn't using a shared workdir
751 s = os.path.abspath(s) 747 s = os.path.abspath(s)
752 workdir = os.path.abspath(workdir) 748 unpackdir = os.path.abspath(unpackdir)
753 if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: 749 if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
754 # Handle if S is set to a subdirectory of the source 750 # Handle if S is set to a subdirectory of the source
755 srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] 751 srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
756 srctree = os.path.join(srctree, srcsubdir) 752 srctree = os.path.join(srctree, srcsubdir)
757 return srctree 753 return srctree
758 754
759def modify(args, config, basepath, workspace): 755def modify(args, config, basepath, workspace):
760 """Entry point for the devtool 'modify' subcommand""" 756 """Entry point for the devtool 'modify' subcommand"""
761 import bb 757 import bb.data
758 import bb.process
762 import oe.recipeutils 759 import oe.recipeutils
763 import oe.patch 760 import oe.patch
764 import oe.path 761 import oe.path
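
The rewritten get_staging_kver() above reads VERSION, PATCHLEVEL and SUBLEVEL from lines 2-4 of the kernel Makefile via itertools.islice and joins them into a version string. A standalone sketch against a made-up Makefile header:

import io
import itertools

makefile = io.StringIO(
    "# SPDX-License-Identifier: GPL-2.0\n"
    "VERSION = 6\n"
    "PATCHLEVEL = 6\n"
    "SUBLEVEL = 23\n"
)
# islice(f, 1, 4) skips the first line and yields the next three.
kver = ".".join(line.rstrip().split('= ')[1]
                for line in itertools.islice(makefile, 1, 4))
assert kver == "6.6.23"

get_real_srctree() follows the same theme as the other hunks in this series: paths are now derived from UNPACKDIR rather than WORKDIR, since sources unpack under ${UNPACKDIR}.
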
@@ -814,10 +811,8 @@ def modify(args, config, basepath, workspace):
814 staging_kbranch = get_staging_kbranch(srcdir) 811 staging_kbranch = get_staging_kbranch(srcdir)
815 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): 812 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
816 oe.path.copyhardlinktree(srcdir, srctree) 813 oe.path.copyhardlinktree(srcdir, srctree)
817 workdir = rd.getVar('WORKDIR')
818 unpackdir = rd.getVar('UNPACKDIR') 814 unpackdir = rd.getVar('UNPACKDIR')
819 srcsubdir = rd.getVar('S') 815 srcsubdir = rd.getVar('S')
820 localfilesdir = os.path.join(srctree, 'oe-local-files')
821 816
822 # Add locally copied files to gitignore as we add back to the metadata directly 817 # Add locally copied files to gitignore as we add back to the metadata directly
823 local_files = oe.recipeutils.get_recipe_local_files(rd) 818 local_files = oe.recipeutils.get_recipe_local_files(rd)
@@ -912,7 +907,7 @@ def modify(args, config, basepath, workspace):
912 907
913 # Need to grab this here in case the source is within a subdirectory 908 # Need to grab this here in case the source is within a subdirectory
914 srctreebase = srctree 909 srctreebase = srctree
915 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) 910 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
916 911
917 bb.utils.mkdirhier(os.path.dirname(appendfile)) 912 bb.utils.mkdirhier(os.path.dirname(appendfile))
918 with open(appendfile, 'w') as f: 913 with open(appendfile, 'w') as f:
@@ -952,13 +947,6 @@ def modify(args, config, basepath, workspace):
952 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) 947 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
953 948
954 if bb.data.inherits_class('kernel', rd): 949 if bb.data.inherits_class('kernel', rd):
955 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
956 'do_fetch do_unpack do_kernel_configcheck"\n')
957 f.write('\ndo_patch[noexec] = "1"\n')
958 f.write('\ndo_configure:append() {\n'
959 ' cp ${B}/.config ${S}/.config.baseline\n'
960 ' ln -sfT ${B}/.config ${S}/.config.new\n'
961 '}\n')
962 f.write('\ndo_kernel_configme:prepend() {\n' 950 f.write('\ndo_kernel_configme:prepend() {\n'
963 ' if [ -e ${S}/.config ]; then\n' 951 ' if [ -e ${S}/.config ]; then\n'
964 ' mv ${S}/.config ${S}/.config.old\n' 952 ' mv ${S}/.config ${S}/.config.old\n'
@@ -982,6 +970,8 @@ def modify(args, config, basepath, workspace):
982 if branch == args.branch: 970 if branch == args.branch:
983 continue 971 continue
984 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) 972 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
973 if args.debug_build:
974 f.write('\nDEBUG_BUILD = "1"\n')
985 975
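A hedged sketch of the new --debug-build flag added above: when passed to 'devtool modify', the workspace bbappend gains a DEBUG_BUILD = "1" line. The append path below is a throwaway temp file, not devtool's real workspace layout:

    import os, tempfile

    appendfile = os.path.join(tempfile.mkdtemp(), 'example_1.0.bbappend')
    debug_build = True  # i.e. the user ran: devtool modify --debug-build example
    with open(appendfile, 'w') as f:
        f.write('# usual workspace append contents would precede this\n')
        if debug_build:
            f.write('\nDEBUG_BUILD = "1"\n')
    print(open(appendfile).read())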
986 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) 976 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
987 977
@@ -1026,6 +1016,7 @@ def rename(args, config, basepath, workspace):
1026 origfnver = '' 1016 origfnver = ''
1027 1017
1028 recipefilemd5 = None 1018 recipefilemd5 = None
1019 newrecipefilemd5 = None
1029 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 1020 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
1030 try: 1021 try:
1031 rd = parse_recipe(config, tinfoil, args.recipename, True) 1022 rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1103,6 +1094,7 @@ def rename(args, config, basepath, workspace):
1103 1094
1104 # Rename source tree if it's the default path 1095 # Rename source tree if it's the default path
1105 appendmd5 = None 1096 appendmd5 = None
1097 newappendmd5 = None
1106 if not args.no_srctree: 1098 if not args.no_srctree:
1107 srctree = workspace[args.recipename]['srctree'] 1099 srctree = workspace[args.recipename]['srctree']
1108 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): 1100 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
@@ -1191,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
1191 """Get initial and update rev of a recipe. These are the start point of the 1183 """Get initial and update rev of a recipe. These are the start point of the
1192 whole patchset and start point for the patches to be re-generated/updated. 1184 whole patchset and start point for the patches to be re-generated/updated.
1193 """ 1185 """
1194 import bb 1186 import bb.process
1195 1187
1196 # Get current branch 1188 # Get current branch
1197 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', 1189 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
@@ -1317,6 +1309,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1317 """ 1309 """
1318 import oe.recipeutils 1310 import oe.recipeutils
1319 from oe.patch import GitApplyTree 1311 from oe.patch import GitApplyTree
1312 import bb.process
1320 updated = OrderedDict() 1313 updated = OrderedDict()
1321 added = OrderedDict() 1314 added = OrderedDict()
1322 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') 1315 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
@@ -1338,6 +1331,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1338 # values, but they ought to be anyway... 1331 # values, but they ought to be anyway...
1339 new_basename = seqpatch_re.match(new_patch).group(2) 1332 new_basename = seqpatch_re.match(new_patch).group(2)
1340 match_name = None 1333 match_name = None
1334 old_patch = None
1341 for old_patch in existing_patches: 1335 for old_patch in existing_patches:
1342 old_basename = seqpatch_re.match(old_patch).group(2) 1336 old_basename = seqpatch_re.match(old_patch).group(2)
1343 old_basename_splitext = os.path.splitext(old_basename) 1337 old_basename_splitext = os.path.splitext(old_basename)
@@ -1386,6 +1380,7 @@ def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1386 1380
1387def _create_kconfig_diff(srctree, rd, outfile): 1381def _create_kconfig_diff(srctree, rd, outfile):
1388 """Create a kconfig fragment""" 1382 """Create a kconfig fragment"""
1383 import bb.process
1389 # Only update config fragment if both config files exist 1384 # Only update config fragment if both config files exist
1390 orig_config = os.path.join(srctree, '.config.baseline') 1385 orig_config = os.path.join(srctree, '.config.baseline')
1391 new_config = os.path.join(srctree, '.config.new') 1386 new_config = os.path.join(srctree, '.config.new')
@@ -1423,6 +1418,8 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1423 - for removed dict, the absolute path to the existing file in recipe space 1418 - for removed dict, the absolute path to the existing file in recipe space
1424 """ 1419 """
1425 import oe.recipeutils 1420 import oe.recipeutils
1421 import bb.data
1422 import bb.process
1426 1423
1427 # Find out local files (SRC_URI files that exist in the "recipe space"). 1424 # Find out local files (SRC_URI files that exist in the "recipe space").
1428 # Local files that reside in srctree are not included in patch generation. 1425 # Local files that reside in srctree are not included in patch generation.
@@ -1481,7 +1478,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1481 added[fragment_fn] = {} 1478 added[fragment_fn] = {}
1482 else: 1479 else:
1483 if fragment_fn in updated: 1480 if fragment_fn in updated:
1484 revoved[fragment_fn] = updated[fragment_fn] 1481 removed[fragment_fn] = updated[fragment_fn]
1485 del updated[fragment_fn] 1482 del updated[fragment_fn]
1486 1483
1487 # Special handling for cml1, ccmake, etc bbclasses that generated 1484 # Special handling for cml1, ccmake, etc bbclasses that generated
@@ -1514,7 +1511,7 @@ def _determine_files_dir(rd):
1514 1511
1515def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): 1512def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None):
1516 """Implement the 'srcrev' mode of update-recipe""" 1513 """Implement the 'srcrev' mode of update-recipe"""
1517 import bb 1514 import bb.process
1518 import oe.recipeutils 1515 import oe.recipeutils
1519 1516
1520 dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' 1517 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
@@ -1552,6 +1549,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1552 local_files_dir = tempfile.mkdtemp(dir=tempdir) 1549 local_files_dir = tempfile.mkdtemp(dir=tempdir)
1553 srctreebase = workspace[recipename]['srctreebase'] 1550 srctreebase = workspace[recipename]['srctreebase']
1554 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) 1551 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1552 removedentries = {}
1555 if not no_remove: 1553 if not no_remove:
1556 # Find list of existing patches in recipe file 1554 # Find list of existing patches in recipe file
1557 patches_dir = tempfile.mkdtemp(dir=tempdir) 1555 patches_dir = tempfile.mkdtemp(dir=tempdir)
@@ -1615,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1615 1613
1616def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): 1614def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
1617 """Implement the 'patch' mode of update-recipe""" 1615 """Implement the 'patch' mode of update-recipe"""
1618 import bb
1619 import oe.recipeutils 1616 import oe.recipeutils
1620 1617
1621 recipefile = rd.getVar('FILE') 1618 recipefile = rd.getVar('FILE')
@@ -1729,6 +1726,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1729 for basepath, param in upd_p.items(): 1726 for basepath, param in upd_p.items():
1730 path = param['path'] 1727 path = param['path']
1731 patchdir = param.get('patchdir', ".") 1728 patchdir = param.get('patchdir', ".")
1729 patchdir_param = {}
1732 if patchdir != "." : 1730 if patchdir != "." :
1733 patchdir_param = dict(patchdir_params) 1731 patchdir_param = dict(patchdir_params)
1734 if patchdir_param: 1732 if patchdir_param:
@@ -1794,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1794 1792
1795def _guess_recipe_update_mode(srctree, rdata): 1793def _guess_recipe_update_mode(srctree, rdata):
1796 """Guess the recipe update mode to use""" 1794 """Guess the recipe update mode to use"""
1795 import bb.process
1797 src_uri = (rdata.getVar('SRC_URI') or '').split() 1796 src_uri = (rdata.getVar('SRC_URI') or '').split()
1798 git_uris = [uri for uri in src_uri if uri.startswith('git://')] 1797 git_uris = [uri for uri in src_uri if uri.startswith('git://')]
1799 if not git_uris: 1798 if not git_uris:
@@ -1815,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata):
1815 return 'patch' 1814 return 'patch'
1816 1815
1817def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): 1816def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
1817 import bb.data
1818 import bb.process
1818 srctree = workspace[recipename]['srctree'] 1819 srctree = workspace[recipename]['srctree']
1819 if mode == 'auto': 1820 if mode == 'auto':
1820 mode = _guess_recipe_update_mode(srctree, rd) 1821 mode = _guess_recipe_update_mode(srctree, rd)
@@ -1937,6 +1938,7 @@ def status(args, config, basepath, workspace):
1937 1938
1938def _reset(recipes, no_clean, remove_work, config, basepath, workspace): 1939def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
1939 """Reset one or more recipes""" 1940 """Reset one or more recipes"""
1941 import bb.process
1940 import oe.path 1942 import oe.path
1941 1943
1942 def clean_preferred_provider(pn, layerconf_path): 1944 def clean_preferred_provider(pn, layerconf_path):
@@ -1949,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
1949 lines = f.readlines() 1951 lines = f.readlines()
1950 with open(new_layerconf_file, 'a') as nf: 1952 with open(new_layerconf_file, 'a') as nf:
1951 for line in lines: 1953 for line in lines:
1952 pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' 1954 pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
1953 if not re.match(pprovider_exp, line): 1955 if not re.match(pprovider_exp, line):
1954 nf.write(line) 1956 nf.write(line)
1955 else: 1957 else:
@@ -2040,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
2040 2042
2041def reset(args, config, basepath, workspace): 2043def reset(args, config, basepath, workspace):
2042 """Entry point for the devtool 'reset' subcommand""" 2044 """Entry point for the devtool 'reset' subcommand"""
2043 import bb
2044 import shutil
2045 2045
2046 recipes = "" 2046 recipes = ""
2047 2047
@@ -2320,6 +2320,7 @@ def register_commands(subparsers, context):
2320 parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') 2320 parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
2321 parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') 2321 parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
2322 parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") 2322 parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
2323 parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe')
2323 parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) 2324 parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
2324 2325
2325 parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', 2326 parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index fa5b8ef3c7..d9aca6e2db 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -32,7 +32,7 @@ def _run(cmd, cwd=''):
32 32
33def _get_srctree(tmpdir): 33def _get_srctree(tmpdir):
34 srctree = tmpdir 34 srctree = tmpdir
35 dirs = scriptutils.filter_src_subdirs(tmpdir) 35 dirs = os.listdir(tmpdir)
36 if len(dirs) == 1: 36 if len(dirs) == 1:
37 srctree = os.path.join(tmpdir, dirs[0]) 37 srctree = os.path.join(tmpdir, dirs[0])
38 else: 38 else:
@@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path):
76 bb.utils.rename(os.path.join(path, oldfile), 76 bb.utils.rename(os.path.join(path, oldfile),
77 os.path.join(path, newfile)) 77 os.path.join(path, newfile))
78 78
79def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): 79def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
80 oldrecipe = os.path.basename(oldrecipe) 80 oldrecipe = os.path.basename(oldrecipe)
81 if oldrecipe.endswith('_%s.bb' % oldpv): 81 if oldrecipe.endswith('_%s.bb' % oldpv):
82 newrecipe = '%s_%s.bb' % (bpn, newpv) 82 newrecipe = '%s_%s.bb' % (pn, newpv)
83 if oldrecipe != newrecipe: 83 if oldrecipe != newrecipe:
84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) 84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
85 else: 85 else:
86 newrecipe = oldrecipe 86 newrecipe = oldrecipe
87 return os.path.join(path, newrecipe) 87 return os.path.join(path, newrecipe)
88 88
89def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): 89def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
90 _rename_recipe_dirs(oldpv, newpv, path) 90 _rename_recipe_dirs(oldpv, newpv, path)
91 return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) 91 return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
92 92
93def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): 93def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
94 """Writes an append file""" 94 """Writes an append file"""
@@ -169,6 +169,7 @@ def _get_uri(rd):
169 169
170def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): 170def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
171 """Extract sources of a recipe with a new version""" 171 """Extract sources of a recipe with a new version"""
172 import oe.patch
172 173
173 def __run(cmd): 174 def __run(cmd):
174 """Simple wrapper which calls _run with srctree as cwd""" 175 """Simple wrapper which calls _run with srctree as cwd"""
@@ -187,9 +188,9 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
187 if uri.startswith('git://') or uri.startswith('gitsm://'): 188 if uri.startswith('git://') or uri.startswith('gitsm://'):
188 __run('git fetch') 189 __run('git fetch')
189 __run('git checkout %s' % rev) 190 __run('git checkout %s' % rev)
190 __run('git tag -f devtool-base-new') 191 __run('git tag -f --no-sign devtool-base-new')
191 __run('git submodule update --recursive') 192 __run('git submodule update --recursive')
192 __run('git submodule foreach \'git tag -f devtool-base-new\'') 193 __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'')
193 (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') 194 (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
194 paths += [os.path.join(srctree, p) for p in stdout.splitlines()] 195 paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
195 checksums = {} 196 checksums = {}
@@ -256,7 +257,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
256 useroptions = [] 257 useroptions = []
257 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) 258 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
258 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) 259 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
259 __run('git tag -f devtool-base-%s' % newpv) 260 __run('git tag -f --no-sign devtool-base-%s' % newpv)
260 261
261 revs = {} 262 revs = {}
262 for path in paths: 263 for path in paths:
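Why the -f tags above gained --no-sign (a sketch outside the patch, assuming a git new enough to know the flag): with tag.gpgSign=true in the user's configuration, devtool's internal bookkeeping tags would otherwise try to invoke GPG. Demonstrated in a throwaway repository:

    import subprocess, tempfile

    repo = tempfile.mkdtemp()
    subprocess.run(['git', 'init', '-q', repo], check=True)
    subprocess.run(['git', '-C', repo, '-c', 'user.name=t', '-c', 'user.email=t@t',
                    'commit', '-q', '--allow-empty', '-m', 'base'], check=True)
    # Even with signing forced on, --no-sign keeps this a plain lightweight tag:
    subprocess.run(['git', '-C', repo, '-c', 'tag.gpgSign=true',
                    'tag', '-f', '--no-sign', 'devtool-base-new'], check=True)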
@@ -335,19 +336,19 @@ def _add_license_diff_to_recipe(path, diff):
335def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): 336def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
336 """Creates the new recipe under workspace""" 337 """Creates the new recipe under workspace"""
337 338
338 bpn = rd.getVar('BPN') 339 pn = rd.getVar('PN')
339 path = os.path.join(workspace, 'recipes', bpn) 340 path = os.path.join(workspace, 'recipes', pn)
340 bb.utils.mkdirhier(path) 341 bb.utils.mkdirhier(path)
341 copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) 342 copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
342 if not copied: 343 if not copied:
343 raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) 344 raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
344 logger.debug('Copied %s to %s' % (copied, path)) 345 logger.debug('Copied %s to %s' % (copied, path))
345 346
346 oldpv = rd.getVar('PV') 347 oldpv = rd.getVar('PV')
347 if not newpv: 348 if not newpv:
348 newpv = oldpv 349 newpv = oldpv
349 origpath = rd.getVar('FILE') 350 origpath = rd.getVar('FILE')
350 fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) 351 fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
351 logger.debug('Upgraded %s => %s' % (origpath, fullpath)) 352 logger.debug('Upgraded %s => %s' % (origpath, fullpath))
352 353
353 newvalues = {} 354 newvalues = {}
@@ -534,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses):
534 diff = diff + line 535 diff = diff + line
535 return diff 536 return diff
536 537
538def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
539 tasks = []
540 for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
541 logger.info('Running extra recipe upgrade task: %s' % task)
542 res = tinfoil.build_targets(pn, task, handle_events=True)
543
544 if not res:
545 raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
546
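A sketch of how the new hook is driven (names mirror the function above; the datastore is stubbed here since the real one comes from tinfoil, and do_update_crates is only an example of a task a recipe might list in RECIPE_UPGRADE_EXTRA_TASKS):

    class _FakeData:
        def getVar(self, name):
            # hypothetical: a recipe opting in to an extra post-upgrade task
            return 'do_update_crates' if name == 'RECIPE_UPGRADE_EXTRA_TASKS' else None

    rd = _FakeData()
    for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
        print('would run: tinfoil.build_targets(pn, %r, handle_events=True)' % task)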
537def upgrade(args, config, basepath, workspace): 547def upgrade(args, config, basepath, workspace):
538 """Entry point for the devtool 'upgrade' subcommand""" 548 """Entry point for the devtool 'upgrade' subcommand"""
539 549
@@ -561,7 +571,7 @@ def upgrade(args, config, basepath, workspace):
561 else: 571 else:
562 srctree = standard.get_default_srctree(config, pn) 572 srctree = standard.get_default_srctree(config, pn)
563 573
564 srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) 574 srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
565 575
566 # try to automatically discover latest version and revision if not provided on command line 576 # try to automatically discover latest version and revision if not provided on command line
567 if not args.version and not args.srcrev: 577 if not args.version and not args.srcrev:
@@ -601,7 +611,7 @@ def upgrade(args, config, basepath, workspace):
601 license_diff = _generate_license_diff(old_licenses, new_licenses) 611 license_diff = _generate_license_diff(old_licenses, new_licenses)
602 rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) 612 rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
603 except (bb.process.CmdError, DevtoolError) as e: 613 except (bb.process.CmdError, DevtoolError) as e:
604 recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) 614 recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
605 _upgrade_error(e, recipedir, srctree, args.keep_failure) 615 _upgrade_error(e, recipedir, srctree, args.keep_failure)
606 standard._add_md5(config, pn, os.path.dirname(rf)) 616 standard._add_md5(config, pn, os.path.dirname(rf))
607 617
@@ -609,6 +619,8 @@ def upgrade(args, config, basepath, workspace):
609 copied, config.workspace_path, rd) 619 copied, config.workspace_path, rd)
610 standard._add_md5(config, pn, af) 620 standard._add_md5(config, pn, af)
611 621
622 _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)
623
612 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) 624 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
613 625
614 logger.info('Upgraded source extracted to %s' % srctree) 626 logger.info('Upgraded source extracted to %s' % srctree)
@@ -643,18 +655,28 @@ def latest_version(args, config, basepath, workspace):
643 return 0 655 return 0
644 656
645def check_upgrade_status(args, config, basepath, workspace): 657def check_upgrade_status(args, config, basepath, workspace):
658 def _print_status(recipe):
659 print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'],
660 recipe['cur_ver'],
661 recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
662 recipe['maintainer'],
663 recipe['revision'] if recipe['revision'] != 'N/A' else "",
664 "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
646 if not args.recipe: 665 if not args.recipe:
647 logger.info("Checking the upstream status for all recipes may take a few minutes") 666 logger.info("Checking the upstream status for all recipes may take a few minutes")
648 results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) 667 results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
649 for result in results: 668 for recipegroup in results:
650 # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason 669 upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
651 if args.all or result[1] != 'MATCH': 670 currents = [r for r in recipegroup if r['status'] == 'MATCH']
652 print("{:25} {:15} {:15} {} {} {}".format( result[0], 671 if len(upgrades) > 1:
653 result[2], 672 print("These recipes need to be upgraded together {")
654 result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), 673 for r in sorted(upgrades, key=lambda r:r['pn']):
655 result[4], 674 _print_status(r)
656 result[5] if result[5] != 'N/A' else "", 675 if len(upgrades) > 1:
657 "cannot be updated due to: %s" %(result[6]) if result[6] else "")) 676 print("}")
677 for r in currents:
678 if args.all:
679 _print_status(r)
658 680
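An illustration of the regrouped check-upgrade-status output above, with made-up data (field names match the dicts returned by get_recipe_upgrade_status; the values are hypothetical): recipes whose upgrades are linked now print inside a brace-delimited block.

    group = [
        {'pn': 'libfoo', 'cur_ver': '1.0', 'status': 'UPDATE', 'next_ver': '1.1',
         'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
        {'pn': 'libfoo-native', 'cur_ver': '1.0', 'status': 'UPDATE', 'next_ver': '1.1',
         'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
    ]
    upgrades = [r for r in group if r['status'] != 'MATCH']
    if len(upgrades) > 1:
        print("These recipes need to be upgraded together {")
        for r in sorted(upgrades, key=lambda r: r['pn']):
            print("{:25} {:15} {:15} {}".format(r['pn'], r['cur_ver'],
                                                r['next_ver'], r['maintainer']))
        print("}")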
659def register_commands(subparsers, context): 681def register_commands(subparsers, context):
660 """Register devtool subcommands from this plugin""" 682 """Register devtool subcommands from this plugin"""
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
index 964817766b..bf39f71b11 100644
--- a/scripts/lib/devtool/utilcmds.py
+++ b/scripts/lib/devtool/utilcmds.py
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace):
64 b = rd.getVar('B') 64 b = rd.getVar('B')
65 s = rd.getVar('S') 65 s = rd.getVar('S')
66 configurescript = os.path.join(s, 'configure') 66 configurescript = os.path.join(s, 'configure')
67 confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) 67 confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd))
68 configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') 68 configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
69 extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') 69 extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
70 extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') 70 extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 10945d6008..041d79f162 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None):
317 import oe.recipeutils 317 import oe.recipeutils
318 318
319 srcdir = rd.getVar('S') 319 srcdir = rd.getVar('S')
320 workdir = rd.getVar('WORKDIR') 320 unpackdir = rd.getVar('UNPACKDIR')
321 321
322 import bb.fetch 322 import bb.fetch
323 simplified = {} 323 simplified = {}
@@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None):
336 src_destdir = os.path.dirname(srcfile) 336 src_destdir = os.path.dirname(srcfile)
337 if not args.use_workdir: 337 if not args.use_workdir:
338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): 338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
339 srcdir = os.path.join(workdir, 'git') 339 srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))
340 if not bb.data.inherits_class('kernel-yocto', rd): 340 if not bb.data.inherits_class('kernel-yocto', rd):
341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') 341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}')
342 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) 342 src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir)
343 src_destdir = os.path.normpath(src_destdir) 343 src_destdir = os.path.normpath(src_destdir)
344 344
345 if src_destdir and src_destdir != '.': 345 if src_destdir and src_destdir != '.':
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 8e9ff38db6..ef0ba974a9 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib 18import hashlib
19import bb.fetch2 19import bb.fetch2
20logger = logging.getLogger('recipetool') 20logger = logging.getLogger('recipetool')
21from oe.license import tidy_licenses
22from oe.license_finder import find_licenses
21 23
22tinfoil = None 24tinfoil = None
23plugins = None 25plugins = None
@@ -528,7 +530,7 @@ def create_recipe(args):
528 if ftmpdir and args.keep_temp: 530 if ftmpdir and args.keep_temp:
529 logger.info('Fetch temp directory is %s' % ftmpdir) 531 logger.info('Fetch temp directory is %s' % ftmpdir)
530 532
531 dirlist = scriptutils.filter_src_subdirs(srctree) 533 dirlist = os.listdir(srctree)
532 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) 534 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
533 if len(dirlist) == 1: 535 if len(dirlist) == 1:
534 singleitem = os.path.join(srctree, dirlist[0]) 536 singleitem = os.path.join(srctree, dirlist[0])
@@ -637,7 +639,6 @@ def create_recipe(args):
637 if len(splitline) > 1: 639 if len(splitline) > 1:
638 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 640 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
639 srcuri = reformat_git_uri(splitline[1]) + ';branch=master' 641 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
640 srcsubdir = 'git'
641 break 642 break
642 643
643 if args.src_subdir: 644 if args.src_subdir:
@@ -735,7 +736,7 @@ def create_recipe(args):
735 if srcsubdir and not args.binary: 736 if srcsubdir and not args.binary:
736 # (for binary packages we explicitly specify subdir= when fetching to 737 # (for binary packages we explicitly specify subdir= when fetching to
737 # match the default value of S, so we don't need to set it in that case) 738 # match the default value of S, so we don't need to set it in that case)
738 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) 739 lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir)
739 lines_before.append('') 740 lines_before.append('')
740 741
741 if pkgarch: 742 if pkgarch:
@@ -764,6 +765,7 @@ def create_recipe(args):
764 extrafiles = extravalues.pop('extrafiles', {}) 765 extrafiles = extravalues.pop('extrafiles', {})
765 extra_pn = extravalues.pop('PN', None) 766 extra_pn = extravalues.pop('PN', None)
766 extra_pv = extravalues.pop('PV', None) 767 extra_pv = extravalues.pop('PV', None)
768 run_tasks = extravalues.pop('run_tasks', "").split()
767 769
768 if extra_pv and not realpv: 770 if extra_pv and not realpv:
769 realpv = extra_pv 771 realpv = extra_pv
@@ -824,7 +826,8 @@ def create_recipe(args):
824 extraoutdir = os.path.join(os.path.dirname(outfile), pn) 826 extraoutdir = os.path.join(os.path.dirname(outfile), pn)
825 bb.utils.mkdirhier(extraoutdir) 827 bb.utils.mkdirhier(extraoutdir)
826 for destfn, extrafile in extrafiles.items(): 828 for destfn, extrafile in extrafiles.items():
827 shutil.move(extrafile, os.path.join(extraoutdir, destfn)) 829 fn = destfn.format(pn=pn, pv=realpv)
830 shutil.move(extrafile, os.path.join(extraoutdir, fn))
828 831
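A hedged example of the placeholder support added above for extra-file names: a plugin may now supply a destination name containing {pn}/{pv} fields. The recipe name and key below are hypothetical:

    pn, realpv = 'zvariant', '3.12.0'       # hypothetical recipe name/version
    destfn = '{pn}-crates.inc'              # hypothetical key supplied by a plugin
    print(destfn.format(pn=pn, pv=realpv))  # -> zvariant-crates.inc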
829 lines = lines_before 832 lines = lines_before
830 lines_before = [] 833 lines_before = []
@@ -839,7 +842,7 @@ def create_recipe(args):
839 line = line.replace(realpv, '${PV}') 842 line = line.replace(realpv, '${PV}')
840 if pn: 843 if pn:
841 line = line.replace(pn, '${BPN}') 844 line = line.replace(pn, '${BPN}')
842 if line == 'S = "${WORKDIR}/${BPN}-${PV}"': 845 if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line:
843 skipblank = True 846 skipblank = True
844 continue 847 continue
845 elif line.startswith('SRC_URI = '): 848 elif line.startswith('SRC_URI = '):
@@ -917,6 +920,10 @@ def create_recipe(args):
917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 920 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files() 921 tinfoil.modified_files()
919 922
923 for task in run_tasks:
924 logger.info("Running task %s" % task)
925 tinfoil.build_file_sync(outfile, task)
926
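A sketch of the run_tasks mechanism added above: a plugin can now request tasks to run on the freshly written recipe via extravalues, and create_recipe drives each through tinfoil.build_file_sync. The task name here is hypothetical:

    extravalues = {'run_tasks': 'update_crates'}  # hypothetical plugin output
    run_tasks = extravalues.pop('run_tasks', "").split()
    for task in run_tasks:
        print('would run: tinfoil.build_file_sync(outfile, %r)' % task)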
920 if tempsrc: 927 if tempsrc:
921 if args.keep_temp: 928 if args.keep_temp:
922 logger.info('Preserving temporary directory %s' % tempsrc) 929 logger.info('Preserving temporary directory %s' % tempsrc)
@@ -944,23 +951,13 @@ def fixup_license(value):
944 return '(' + value + ')' 951 return '(' + value + ')'
945 return value 952 return value
946 953
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
957def handle_license_vars(srctree, lines_before, handled, extravalues, d): 954def handle_license_vars(srctree, lines_before, handled, extravalues, d):
958 lichandled = [x for x in handled if x[0] == 'license'] 955 lichandled = [x for x in handled if x[0] == 'license']
959 if lichandled: 956 if lichandled:
960 # Someone else has already handled the license vars, just return their value 957 # Someone else has already handled the license vars, just return their value
961 return lichandled[0][1] 958 return lichandled[0][1]
962 959
963 licvalues = guess_license(srctree, d) 960 licvalues = find_licenses(srctree, d)
964 licenses = [] 961 licenses = []
965 lic_files_chksum = [] 962 lic_files_chksum = []
966 lic_unknown = [] 963 lic_unknown = []
@@ -1040,222 +1037,9 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
1040 handled.append(('license', licvalues)) 1037 handled.append(('license', licvalues))
1041 return licvalues 1038 return licvalues
1042 1039
1043def get_license_md5sums(d, static_only=False, linenumbers=False):
1044 import bb.utils
1045 import csv
1046 md5sums = {}
1047 if not static_only and not linenumbers:
1048 # Gather md5sums of license files in common license dir
1049 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1050 for fn in os.listdir(commonlicdir):
1051 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1052 md5sums[md5value] = fn
1053
1054 # The following were extracted from common values in various recipes
1055 # (double checking the license against the license file itself, not just
1056 # the LICENSE value in the recipe)
1057
1058 # Read license md5sums from csv file
1059 scripts_path = os.path.dirname(os.path.realpath(__file__))
1060 for path in (d.getVar('BBPATH').split(':')
1061 + [os.path.join(scripts_path, '..', '..')]):
1062 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
1063 if os.path.isfile(csv_path):
1064 with open(csv_path, newline='') as csv_file:
1065 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
1066 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
1067 for row in reader:
1068 if linenumbers:
1069 md5sums[row['md5sum']] = (
1070 row['license'], row['beginline'], row['endline'], row['md5'])
1071 else:
1072 md5sums[row['md5sum']] = row['license']
1073
1074 return md5sums
1075
1076def crunch_known_licenses(d):
1077 '''
1078 Calculate the MD5 checksums for the crunched versions of all common
1079 licenses. Also add additional known checksums.
1080 '''
1081
1082 crunched_md5sums = {}
1083
1084 # common licenses
1085 crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
1086 crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
1087 crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
1088
1089 # The following two were gleaned from the "forever" npm package
1090 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1091 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1092 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1093 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1094 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
1095 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1096 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1098 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
1099 # unixODBC-2.3.4 COPYING
1100 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
1101 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1102 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
1103 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1104 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1105
1106 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
1107 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
1108 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
1109 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
1110 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
1111 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
1112 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
1113 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
1114 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
1115 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
1116 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
1117 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
1118 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
1119 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
1120 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
1121 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
1122 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
1123 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
1124 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
1125 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
1126 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
1127 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
1128 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
1129 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
1130 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
1131 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
1132 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
1133 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
1134 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
1135 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
1136 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
1137 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
1138 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
1139 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
1140 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
1141 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
1142
1143 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1144 for fn in sorted(os.listdir(commonlicdir)):
1145 md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
1146 if md5value not in crunched_md5sums:
1147 crunched_md5sums[md5value] = fn
1148 elif fn != crunched_md5sums[md5value]:
1149 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
1150 else:
1151 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
1152
1153 return crunched_md5sums
1154
1155def crunch_license(licfile):
1156 '''
1157 Remove non-material text from a license file and then calculate its
1158 md5sum. This works well for licenses that contain a copyright statement,
1159 but is also a useful way to handle people's insistence upon reformatting
1160 the license text slightly (with no material difference to the text of the
1161 license).
1162 '''
1163
1164 import oe.utils
1165
1166 # Note: these are carefully constructed!
1167 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
1168 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1169 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
1170 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
1171 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
1172 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
1173 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
1174 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
1175
1176 lictext = []
1177 with open(licfile, 'r', errors='surrogateescape') as f:
1178 for line in f:
1179 # Drop opening statements
1180 if copyright_re.match(line):
1181 continue
1182 elif disclaimer_re.match(line):
1183 continue
1184 elif email_re.match(line):
1185 continue
1186 elif header_re.match(line):
1187 continue
1188 elif tag_re.match(line):
1189 continue
1190 elif url_re.match(line):
1191 continue
1192 elif license_title_re.match(line):
1193 continue
1194 elif license_statement_re.match(line):
1195 continue
1196 # Strip comment symbols
1197 line = line.replace('*', '') \
1198 .replace('#', '')
1199 # Unify spelling
1200 line = line.replace('sub-license', 'sublicense')
1201 # Squash spaces
1202 line = oe.utils.squashspaces(line.strip())
1203 # Replace smart quotes, double quotes and backticks with single quotes
1204 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1205 # Unify brackets
1206 line = line.replace("{", "[").replace("}", "]")
1207 if line:
1208 lictext.append(line)
1209
1210 m = hashlib.md5()
1211 try:
1212 m.update(' '.join(lictext).encode('utf-8'))
1213 md5val = m.hexdigest()
1214 except UnicodeEncodeError:
1215 md5val = None
1216 lictext = ''
1217 return md5val, lictext
1218
1219def guess_license(srctree, d):
1220 import bb
1221 md5sums = get_license_md5sums(d)
1222
1223 crunched_md5sums = crunch_known_licenses(d)
1224
1225 licenses = []
1226 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1227 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
1228 licfiles = []
1229 for root, dirs, files in os.walk(srctree):
1230 for fn in files:
1231 if fn.endswith(skip_extensions):
1232 continue
1233 for spec in licspecs:
1234 if fnmatch.fnmatch(fn, spec):
1235 fullpath = os.path.join(root, fn)
1236 if not fullpath in licfiles:
1237 licfiles.append(fullpath)
1238 for licfile in sorted(licfiles):
1239 md5value = bb.utils.md5_file(licfile)
1240 license = md5sums.get(md5value, None)
1241 if not license:
1242 crunched_md5, lictext = crunch_license(licfile)
1243 license = crunched_md5sums.get(crunched_md5, None)
1244 if lictext and not license:
1245 license = 'Unknown'
1246 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
1247 "and replace `Unknown` with the license:\n" \
1248 "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
1249 if license:
1250 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1251
1252 # FIXME should we grab at least one source file with a license header and add that too?
1253
1254 return licenses
1255
1256def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): 1040def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1257 """ 1041 """
1258 Given a list of (license, path, md5sum) as returned by guess_license(), 1042 Given a list of (license, path, md5sum) as returned by match_licenses(),
1259 a dict of package name to path mappings, write out a set of 1043 a dict of package name to path mappings, write out a set of
1260 package-specific LICENSE values. 1044 package-specific LICENSE values.
1261 """ 1045 """
@@ -1284,6 +1068,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
1284 outlicenses[pkgname] = licenses 1068 outlicenses[pkgname] = licenses
1285 return outlicenses 1069 return outlicenses
1286 1070
1071def generate_common_licenses_chksums(common_licenses, d):
1072 lic_files_chksums = []
1073 for license in tidy_licenses(common_licenses):
1074 licfile = '${COMMON_LICENSE_DIR}/' + license
1075 md5value = bb.utils.md5_file(d.expand(licfile))
1076 lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
1077 return lic_files_chksums
1078
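A usage sketch for generate_common_licenses_chksums above; bb.utils.md5_file and the datastore expansion are stubbed since they need a bitbake environment, and the license file content is a placeholder rather than the real COMMON_LICENSE_DIR copy:

    import hashlib, os, tempfile

    def md5_file(path):                        # stand-in for bb.utils.md5_file
        with open(path, 'rb') as f:
            return hashlib.md5(f.read()).hexdigest()

    licdir = tempfile.mkdtemp()                # stand-in for ${COMMON_LICENSE_DIR}
    with open(os.path.join(licdir, 'MIT'), 'w') as f:
        f.write('MIT License ...')             # placeholder, not the real text

    chksums = ['file://${COMMON_LICENSE_DIR}/%s;md5=%s'
               % (lic, md5_file(os.path.join(licdir, lic))) for lic in ['MIT']]
    print(chksums)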
1287def read_pkgconfig_provides(d): 1079def read_pkgconfig_provides(d):
1288 pkgdatadir = d.getVar('PKGDATA_DIR') 1080 pkgdatadir = d.getVar('PKGDATA_DIR')
1289 pkgmap = {} 1081 pkgmap = {}
@@ -1418,4 +1210,3 @@ def register_commands(subparsers):
1418 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1210 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1419 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1211 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1420 parser_create.set_defaults(func=create_recipe) 1212 parser_create.set_defaults(func=create_recipe)
1421
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
index a85a2f2786..4b1fa39d13 100644
--- a/scripts/lib/recipetool/create_go.py
+++ b/scripts/lib/recipetool/create_go.py
@@ -10,13 +10,7 @@
10# 10#
11 11
12 12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars 13from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import guess_license, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError, HTTPError
20 14
21import bb.utils 15import bb.utils
22import json 16import json
@@ -25,33 +19,20 @@ import os
25import re 19import re
26import subprocess 20import subprocess
27import sys 21import sys
28import shutil
29import tempfile 22import tempfile
30import urllib.parse
31import urllib.request
32 23
33 24
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool') 25logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38 26
39tinfoil = None 27tinfoil = None
40 28
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
49 29
50def tinfoil_init(instance): 30def tinfoil_init(instance):
51 global tinfoil 31 global tinfoil
52 tinfoil = instance 32 tinfoil = instance
53 33
54 34
35
55class GoRecipeHandler(RecipeHandler): 36class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation""" 37 """Class to handle the go recipe creation"""
57 38
@@ -83,578 +64,6 @@ class GoRecipeHandler(RecipeHandler):
83 64
84 return bindir 65 return bindir
85 66
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
99 # Some modulepath are just redirects to github (or some other vcs
100 # hoster). Therefore, we check if this modulepath redirects to
101 # somewhere else
102 if resp.geturl() != url.geturl():
103 bb.debug(1, "%s is redirectred to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
208 vcstype = m[vcs.type]
209 repourl = m[vcs.repo]
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = {}
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 srv = content[0][1].split()
237 self.__srv[srv[0]] = srv
238
239 def go_import(self, modulepath):
240 if modulepath in self.__srv:
241 srv = self.__srv[modulepath]
242 return GoImport(srv[0], srv[1], srv[2], None)
243 return None
244
245 url = url.geturl() + "?go-get=1"
246 req = urllib.request.Request(url)
247
248 try:
249 body = urllib.request.urlopen(req).read()
250 except HTTPError as http_err:
251 logger.warning(
252 "Unclean status when fetching page from [%s]: %s", url, str(http_err))
253 body = http_err.fp.read()
254 except URLError as url_err:
255 logger.warning(
256 "Failed to fetch page from [%s]: %s", url, str(url_err))
257 return None
258
259 parser = GoImportHTMLParser()
260 parser.feed(body.decode('utf-8'))
261 parser.close()
262
263 return parser.go_import(modulepath)
264
265 def __resolve_from_golang_proxy(self, modulepath, version):
266 """
267 Resolves repository data from golang proxy
268 """
269 url = urllib.parse.urlparse("https://proxy.golang.org/"
270 + modulepath
271 + "/@v/"
272 + version
273 + ".info")
274
275 # Transform url to lower case, golang proxy doesn't like mixed case
276 req = urllib.request.Request(url.geturl().lower())
277
278 try:
279 resp = urllib.request.urlopen(req)
280 except URLError as url_err:
281 logger.warning(
282 "Failed to fetch page from [%s]: %s", url, str(url_err))
283 return None
284
285 golang_proxy_res = resp.read().decode('utf-8')
286 modinfo = json.loads(golang_proxy_res)
287
288 if modinfo and 'Origin' in modinfo:
289 origin = modinfo['Origin']
290 _root_url = urllib.parse.urlparse(origin['URL'])
291
292 # We normalize the repo URL since we don't want the scheme in it
293 _subdir = origin['Subdir'] if 'Subdir' in origin else None
294 _root, _, _ = self.__split_path_version(modulepath)
295 if _subdir:
296 _root = _root[:-len(_subdir)].strip('/')
297
298 _commit = origin['Hash']
299 _vcs = origin['VCS']
300 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
301
302 return None
303
304 def __resolve_repository(self, modulepath):
305 """
306 Resolves src uri from go module-path
307 """
308 repodata = self.__resolve_repository_static(modulepath)
309 if not repodata or not repodata.url:
310 repodata = self.__resolve_repository_dynamic(modulepath)
311 if not repodata or not repodata.url:
312 logger.error(
313 "Could not resolve repository for module path '%s'" % modulepath)
314 # There is no way to recover from this
315 sys.exit(14)
316 if repodata:
317 logger.debug(1, "Resolved download path for import '%s' => %s" % (
318 modulepath, repodata.url))
319 return repodata
320
321 def __split_path_version(self, path):
322 i = len(path)
323 dot = False
324 for j in range(i, 0, -1):
325 if path[j - 1] < '0' or path[j - 1] > '9':
326 break
327 if path[j - 1] == '.':
328 dot = True
329 break
330 i = j - 1
331
332 if i <= 1 or i == len(
333 path) or path[i - 1] != 'v' or path[i - 2] != '/':
334 return path, "", True
335
336 prefix, pathMajor = path[:i - 2], path[i - 2:]
337 if dot or len(
338 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
339 return path, "", False
340
341 return prefix, pathMajor, True
342
343 def __get_path_major(self, pathMajor):
344 if not pathMajor:
345 return ""
346
347 if pathMajor[0] != '/' and pathMajor[0] != '.':
348 logger.error(
349 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
350
351 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
352 pathMajor = pathMajor[:len("-unstable") - 2]
353
354 return pathMajor[1:]
355
356 def __build_coderepo(self, repo, path):
357 codedir = ""
358 pathprefix, pathMajor, _ = self.__split_path_version(path)
359 if repo.root == path:
360 pathprefix = path
361 elif path.startswith(repo.root):
362 codedir = pathprefix[len(repo.root):].strip('/')
363
364 pseudoMajor = self.__get_path_major(pathMajor)
365
366 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
367 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
368
369 return CodeRepo(path, repo.root, codedir,
370 pathMajor, pathprefix, pseudoMajor)
371
372 def __resolve_version(self, repo, path, version):
373 hash = None
374 coderoot = self.__build_coderepo(repo, path)
375
376 def vcs_fetch_all():
377 tmpdir = tempfile.mkdtemp()
378 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
379 bb.process.run(clone_cmd)
380 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
381 output, _ = bb.process.run(
382 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
383 bb.utils.prunedir(tmpdir)
384 return output.strip().split('\n')
385
386 def vcs_fetch_remote(tag):
387 # append '*' so ls-remote also returns the peeled tag refs (the ^{} entries)
388 refs = {}
389 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
390 repo.url, tag)
391 output, _ = bb.process.run(ls_remote_cmd)
392 output = output.strip().split('\n')
393 for line in output:
394 f = line.split(maxsplit=1)
395 if len(f) != 2:
396 continue
397
398 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
399 if f[1].startswith(prefix):
400 refs[f[1][len(prefix):]] = f[0]
401
402 for key, hash in refs.items():
403 if key.endswith(r"^{}"):
404 refs[key.strip(r"^{}")] = hash
405
406 return refs[tag]
407
408 m_pseudo_semver = re_pseudo_semver.match(version)
409
410 if m_pseudo_semver:
411 remote_refs = vcs_fetch_all()
412 short_commit = m_pseudo_semver.group('commithash')
413 for l in remote_refs:
414 r = l.split(maxsplit=1)
415 sha1 = r[0] if len(r) else None
416 if not sha1:
417 logger.error(
418 "Ups: could not resolve abbref commit for %s" % short_commit)
419
420 elif sha1.startswith(short_commit):
421 hash = sha1
422 break
423 else:
424 m_semver = re_semver.match(version)
425 if m_semver:
426
427 def get_sha1_remote(re):
428 rsha1 = None
429 for line in remote_refs:
430 # Split lines of the following format:
431 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
432 lineparts = line.split(maxsplit=1)
433 sha1 = lineparts[0] if len(lineparts) else None
434 refstring = lineparts[1] if len(
435 lineparts) == 2 else None
436 if refstring:
437 # Normalize tag string and split in case of multiple
438 # refs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
439 refs = refstring.strip('(), ').split(',')
440 for ref in refs:
441 if re.match(ref.strip()):
442 rsha1 = sha1
443 return rsha1
444
445 semver = "v" + m_semver.group('major') + "."\
446 + m_semver.group('minor') + "."\
447 + m_semver.group('patch') \
448 + (("-" + m_semver.group('prerelease'))
449 if m_semver.group('prerelease') else "")
450
451 tag = os.path.join(
452 coderoot.codeDir, semver) if coderoot.codeDir else semver
453
454 # probe tag using 'ls-remote', which is faster than fetching
455 # complete history
456 hash = vcs_fetch_remote(tag)
457 if not hash:
458 # backup: fetch complete history
459 remote_refs = vcs_fetch_all()
460 hash = get_sha1_remote(
461 re.compile(fr"(tag:|HEAD ->) ({tag})"))
462
463 logger.debug(
464 "Resolving commit for tag '%s' -> '%s'", tag, hash)
465 return hash
466
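A note on the peeled refs handled in vcs_fetch_remote(): for an annotated tag, git ls-remote lists both the tag object and, suffixed with ^{}, the commit it points to, which is why the pattern ends in '*' and the suffix is stripped back to the plain tag name. Illustrative output:

    $ git ls-remote -q --tags https://github.com/example/project v1.2.3*
    1111111111111111111111111111111111111111    refs/tags/v1.2.3
    2222222222222222222222222222222222222222    refs/tags/v1.2.3^{}

so refs["v1.2.3"] ends up holding the peeled commit hash 2222... .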
467 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
468 """Generate SRC_URI functions for go imports"""
469
470 logger.info("Resolving repository for module %s", path)
471 # First try to resolve repo and commit from golang proxy
472 # Most info is already there and we don't have to go through the
473 # repository or even perform the version resolve magic
474 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
475 if golang_proxy_info:
476 repo = golang_proxy_info[0]
477 commit = golang_proxy_info[1]
478 else:
479 # Fallback
480 # Resolve repository by 'hand'
481 repo = self.__resolve_repository(path)
482 commit = self.__resolve_version(repo, path, version)
483
484 url = urllib.parse.urlparse(repo.url)
485 repo_url = url.netloc + url.path
486
487 coderoot = self.__build_coderepo(repo, path)
488
489 inline_fcn = "${@go_src_uri("
490 inline_fcn += f"'{repo_url}','{version}'"
491 if repo_url != path:
492 inline_fcn += f",path='{path}'"
493 if coderoot.codeDir:
494 inline_fcn += f",subdir='{coderoot.codeDir}'"
495 if repo.vcs != 'git':
496 inline_fcn += f",vcs='{repo.vcs}'"
497 if replaces:
498 inline_fcn += f",replaces='{replaces}'"
499 if coderoot.pathMajor:
500 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
501 inline_fcn += ")}"
502
503 return inline_fcn, commit
504
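The returned expression is what later lands in SRC_URI. For a module hosted at its own import path only the mandatory arguments appear; the keyword arguments are emitted only when they deviate from the defaults. Illustrative examples:

    ${@go_src_uri('github.com/foo/bar','v1.2.3')}
    ${@go_src_uri('gitlab.com/grp/repo','v2.0.1',path='example.org/mod/v2',subdir='mod',pathmajor='/v2')}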
505 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
506
507 import re
508 src_uris = []
509 src_revs = []
510
511 def generate_src_rev(path, version, commithash):
512 src_rev = f"# {path}@{version} => {commithash}\n"
513 # Oops... maybe someone manipulated the source repository and the
514 # version or commit could not be resolved. This is a sign that
515 # a) the supply chain was tampered with (bad), or
516 # b) the version-resolving implementation no longer works
517 # (less bad)
518 if not commithash:
519 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
520 src_rev += f"#!!! Could not resolve version !!!\n"
521 src_rev += f"#!!! Possible supply chain attack !!!\n"
522 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
523 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
524
525 return src_rev
526
527 # we first go over the replacement list because we are essentially
528 # interested only in the replaced paths
529 if go_mod['Replace']:
530 for replacement in go_mod['Replace']:
531 oldpath = replacement['Old']['Path']
532 path = replacement['New']['Path']
533 version = ''
534 if 'Version' in replacement['New']:
535 version = replacement['New']['Version']
536
537 if os.path.exists(os.path.join(srctree, path)):
538 # the module refers to a local path, so remove it from the requirement
539 # list because it is a local module
540 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
541 else:
542 # Replace the path and the version so we no longer need to iterate the replacement list
543 for require in go_mod['Require']:
544 if require['Path'] == oldpath:
545 require.update({'Path': path, 'Version': version})
546 break
547
548 for require in go_mod['Require']:
549 path = require['Path']
550 version = require['Version']
551
552 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
553 path, version)
554 src_uris.append(inline_fcn)
555 src_revs.append(generate_src_rev(path, version, commithash))
556
557 # strip version part from module URL /vXX
558 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
559 pn, _ = determine_from_url(baseurl)
560 go_mods_basename = "%s-modules.inc" % pn
561
562 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
563 with open(go_mods_filename, "w") as f:
564 # We introduce this indirection to make the tests a little easier
565 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
566 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
567 for uri in src_uris:
568 f.write(" " + uri + " \\\n")
569 f.write("\"\n\n")
570 for rev in src_revs:
571 f.write(rev + "\n")
572
573 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
574
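The generated modules .inc file therefore has roughly this shape, shown here with a single illustrative dependency:

    SRC_URI += "${GO_DEPENDENCIES_SRC_URI}"
    GO_DEPENDENCIES_SRC_URI = "\
        ${@go_src_uri('github.com/foo/bar','v1.2.3')} \
    "

    # github.com/foo/bar@v1.2.3 => 0123456789abcdef0123456789abcdef01234567
    SRCREV_github.com.foo.bar = "0123456789abcdef0123456789abcdef01234567"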
575 def __go_run_cmd(self, cmd, cwd, d):
576 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
577 shell=True, cwd=cwd)
578
579 def __go_native_version(self, d):
580 stdout, _ = self.__go_run_cmd("go version", None, d)
581 m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
582 major = int(m.group(2))
583 minor = int(m.group(3))
584 patch = int(m.group(4))
585
586 return major, minor, patch
587
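As a worked example, the regular expression above maps the output "go version go1.21.5 linux/amd64" to (1, 21, 5).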
588 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
589
590 patchfilename = "go.mod.patch"
591 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
592 d)
593 self.__go_run_cmd("go mod tidy -go=%d.%d" %
594 (go_native_version_major, go_native_version_minor), srctree, d)
595 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
596
597 # Create patch in order to upgrade go version
598 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
599 # Restore original state
600 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
601
602 go_mod = json.loads(stdout)
603 tmpfile = os.path.join(localfilesdir, patchfilename)
604 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
605
606 extravalues['extrafiles'][patchfilename] = tmpfile
607
608 return go_mod, patchfilename
609
610 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
611 # Perform vendoring to retrieve the correct modules.txt
612 tmp_vendor_dir = tempfile.mkdtemp()
613
614 # -v causes go to print modules.txt to stderr
615 _, stderr = self.__go_run_cmd(
616 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
617
618 modules_txt_basename = "modules.txt"
619 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
620 with open(modules_txt_filename, "w") as f:
621 f.write(stderr)
622
623 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
624
625 licenses = []
626 lic_files_chksum = []
627 licvalues = guess_license(tmp_vendor_dir, d)
628 shutil.rmtree(tmp_vendor_dir)
629
630 if licvalues:
631 for licvalue in licvalues:
632 license = licvalue[0]
633 lics = tidy_licenses(fixup_license(license))
634 lics = [lic for lic in lics if lic not in licenses]
635 if len(lics):
636 licenses.extend(lics)
637 lic_files_chksum.append(
638 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
639
640 # strip version part from module URL /vXX
641 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
642 pn, _ = determine_from_url(baseurl)
643 licenses_basename = "%s-licenses.inc" % pn
644
645 licenses_filename = os.path.join(localfilesdir, licenses_basename)
646 with open(licenses_filename, "w") as f:
647 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
648 ' & '.join(sorted(licenses, key=str.casefold)))
649 # We introduce this indirection to make the tests a little easier
650 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
651 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
652 for lic in lic_files_chksum:
653 f.write(" " + lic + " \\\n")
654 f.write("\"\n")
655
656 extravalues['extrafiles'][licenses_basename] = licenses_filename
657
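The resulting licenses .inc file has this shape (illustrative values):

    GO_MOD_LICENSES = "Apache-2.0 & MIT"

    LIC_FILES_CHKSUM += "${VENDORED_LIC_FILES_CHKSUM}"
    VENDORED_LIC_FILES_CHKSUM = "\
        file://src/${GO_IMPORT}/vendor/github.com/foo/bar/LICENSE;md5=0123456789abcdef0123456789abcdef \
    "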
658 def process(self, srctree, classes, lines_before, 67 def process(self, srctree, classes, lines_before,
659 lines_after, handled, extravalues): 68 lines_after, handled, extravalues):
660 69
@@ -665,63 +74,52 @@ class GoRecipeHandler(RecipeHandler):
665 if not files: 74 if not files:
666 return False 75 return False
667 76
668 d = bb.data.createCopy(tinfoil.config_data)
669 go_bindir = self.__ensure_go() 77 go_bindir = self.__ensure_go()
670 if not go_bindir: 78 if not go_bindir:
671 sys.exit(14) 79 sys.exit(14)
672 80
673 d.prependVar('PATH', '%s:' % go_bindir)
674 handled.append('buildsystem') 81 handled.append('buildsystem')
675 classes.append("go-vendor") 82 classes.append("go-mod")
676 83
677 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) 84 # Use go-mod-update-modules to set the full SRC_URI and LICENSE
85 classes.append("go-mod-update-modules")
86 extravalues["run_tasks"] = "update_modules"
678 87
679 go_mod = json.loads(stdout) 88 with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
680 go_import = go_mod['Module']['Path'] 89 env = dict(os.environ)
681 go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) 90 env["PATH"] += f":{go_bindir}"
682 go_version_major = int(go_version_match.group(1)) 91 env['GOMODCACHE'] = tmp_mod_dir
683 go_version_minor = int(go_version_match.group(2))
684 src_uris = []
685 92
686 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') 93 stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True)
687 extravalues.setdefault('extrafiles', {}) 94 go_mod = json.loads(stdout)
95 go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
688 96
689 # Use an explicit name determined from the module name because it 97 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
690 # might differ from the actual URL for replaced modules 98 extravalues.setdefault('extrafiles', {})
691 # strip version part from module URL /vXX
692 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
693 pn, _ = determine_from_url(baseurl)
694 99
695 # go.mod files with version < 1.17 may not include all indirect 100 # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files
696 # dependencies. Thus, we have to upgrade the go version. 101 basename = "{pn}-licenses.inc"
697 if go_version_major == 1 and go_version_minor < 17: 102 filename = os.path.join(localfilesdir, basename)
698 logger.warning( 103 with open(filename, "w") as f:
699 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") 104 f.write("# FROM RECIPETOOL\n")
700 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, 105 extravalues['extrafiles'][f"../{basename}"] = filename
701 extravalues, d)
702 src_uris.append(
703 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
704 106
705 # Check whether the module is vendored. If so, we have nothing to do. 107 basename = "{pn}-go-mods.inc"
706 # Otherwise we gather all dependencies and add them to the recipe 108 filename = os.path.join(localfilesdir, basename)
707 if not os.path.exists(os.path.join(srctree, "vendor")): 109 with open(filename, "w") as f:
110 f.write("# FROM RECIPETOOL\n")
111 extravalues['extrafiles'][f"../{basename}"] = filename
708 112
709 # Write additional $BPN-modules.inc file 113 # Do generic license handling
710 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) 114 d = bb.data.createCopy(tinfoil.config_data)
711 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") 115 handle_license_vars(srctree, lines_before, handled, extravalues, d)
712 lines_before.append("require %s-licenses.inc" % (pn)) 116 self.__rewrite_lic_vars(lines_before)
713 117
714 self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) 118 self.__rewrite_src_uri(lines_before)
715 119
716 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) 120 lines_before.append('require ${BPN}-licenses.inc')
717 lines_before.append("require %s-modules.inc" % (pn)) 121 lines_before.append('require ${BPN}-go-mods.inc')
718 122 lines_before.append(f'GO_IMPORT = "{go_import}"')
719 # Do generic license handling
720 handle_license_vars(srctree, lines_before, handled, extravalues, d)
721 self.__rewrite_lic_uri(lines_before)
722
723 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
724 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
725 123
726 def __update_lines_before(self, updated, newlines, lines_before): 124 def __update_lines_before(self, updated, newlines, lines_before):
727 if updated: 125 if updated:
@@ -733,9 +131,9 @@ class GoRecipeHandler(RecipeHandler):
733 lines_before.append(line) 131 lines_before.append(line)
734 return updated 132 return updated
735 133
736 def __rewrite_lic_uri(self, lines_before): 134 def __rewrite_lic_vars(self, lines_before):
737
738 def varfunc(varname, origvalue, op, newlines): 135 def varfunc(varname, origvalue, op, newlines):
136 import urllib.parse
739 if varname == 'LIC_FILES_CHKSUM': 137 if varname == 'LIC_FILES_CHKSUM':
740 new_licenses = [] 138 new_licenses = []
741 licenses = origvalue.split('\\') 139 licenses = origvalue.split('\\')
@@ -760,12 +158,11 @@ class GoRecipeHandler(RecipeHandler):
760 lines_before, ['LIC_FILES_CHKSUM'], varfunc) 158 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
761 return self.__update_lines_before(updated, newlines, lines_before) 159 return self.__update_lines_before(updated, newlines, lines_before)
762 160
763 def __rewrite_src_uri(self, lines_before, additional_uris = []): 161 def __rewrite_src_uri(self, lines_before):
764 162
765 def varfunc(varname, origvalue, op, newlines): 163 def varfunc(varname, origvalue, op, newlines):
766 if varname == 'SRC_URI': 164 if varname == 'SRC_URI':
767 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] 165 src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
768 src_uri.extend(additional_uris)
769 return src_uri, None, -1, True 166 return src_uri, None, -1, True
770 return origvalue, None, 0, True 167 return origvalue, None, 0, True
771 168
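Taken together, the rewritten handler only emits a recipe skeleton and defers dependency and license enumeration to the update_modules task. The lines_before additions above produce roughly the following, for an illustrative module:

    require ${BPN}-licenses.inc
    require ${BPN}-go-mods.inc
    GO_IMPORT = "github.com/foo/bar"
    SRC_URI = "git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}"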
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 113a89f6a6..8c4cdd5234 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -15,9 +15,9 @@ import bb
15from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package 16from bb.fetch2.npm import npm_package
17from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
18from oe.license_finder import match_licenses, find_license_files
18from recipetool.create import RecipeHandler 19from recipetool.create import RecipeHandler
19from recipetool.create import get_license_md5sums 20from recipetool.create import generate_common_licenses_chksums
20from recipetool.create import guess_license
21from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
22logger = logging.getLogger('recipetool') 22logger = logging.getLogger('recipetool')
23 23
@@ -112,40 +112,54 @@ class NpmRecipeHandler(RecipeHandler):
112 """Return the extra license files and the list of packages""" 112 """Return the extra license files and the list of packages"""
113 licfiles = [] 113 licfiles = []
114 packages = {} 114 packages = {}
115 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
116 # to associate them explicitely to packages for split_pkg_licenses()
117 fallback_licenses = dict()
118
119 def _find_package_licenses(destdir):
120 """Either find license files, or use package.json metadata"""
121 def _get_licenses_from_package_json(package_json):
122 with open(os.path.join(srctree, package_json), "r") as f:
123 data = json.load(f)
124 if "license" in data:
125 licenses = data["license"].split(" ")
126 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
127 return [], licenses
128 else:
129 return [package_json], None
115 130
116 # Handle the parent package
117 packages["${PN}"] = ""
118
119 def _licfiles_append_fallback_readme_files(destdir):
120 """Append README files as fallback to license files if a license files is missing"""
121
122 fallback = True
123 readmes = []
124 basedir = os.path.join(srctree, destdir) 131 basedir = os.path.join(srctree, destdir)
125 for fn in os.listdir(basedir): 132 licfiles = find_license_files(basedir)
126 upper = fn.upper() 133 if len(licfiles) > 0:
127 if upper.startswith("README"): 134 return licfiles, None
128 fullpath = os.path.join(basedir, fn) 135 else:
129 readmes.append(fullpath) 136 # A license wasn't found in the package directory, so we'll use the package.json metadata
130 if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: 137 pkg_json = os.path.join(basedir, "package.json")
131 fallback = False 138 return _get_licenses_from_package_json(pkg_json)
132 if fallback: 139
133 for readme in readmes: 140 def _get_package_licenses(destdir, package):
134 licfiles.append(os.path.relpath(readme, srctree)) 141 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
142 if package_licfiles:
143 licfiles.extend(package_licfiles)
144 else:
145 fallback_licenses[package] = package_licenses
135 146
136 # Handle the dependencies 147 # Handle the dependencies
137 def _handle_dependency(name, params, destdir): 148 def _handle_dependency(name, params, destdir):
138 deptree = destdir.split('node_modules/') 149 deptree = destdir.split('node_modules/')
139 suffix = "-".join([npm_package(dep) for dep in deptree]) 150 suffix = "-".join([npm_package(dep) for dep in deptree])
140 packages["${PN}" + suffix] = destdir 151 packages["${PN}" + suffix] = destdir
141 _licfiles_append_fallback_readme_files(destdir) 152 _get_package_licenses(destdir, "${PN}" + suffix)
142 153
143 with open(shrinkwrap_file, "r") as f: 154 with open(shrinkwrap_file, "r") as f:
144 shrinkwrap = json.load(f) 155 shrinkwrap = json.load(f)
145
146 foreach_dependencies(shrinkwrap, _handle_dependency, dev) 156 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
147 157
148 return licfiles, packages 158 # Handle the parent package
159 packages["${PN}"] = ""
160 _get_package_licenses(srctree, "${PN}")
161
162 return licfiles, packages, fallback_licenses
149 163
150 # Handle the peer dependencies 164 # Handle the peer dependencies
151 def _handle_peer_dependency(self, shrinkwrap_file): 165 def _handle_peer_dependency(self, shrinkwrap_file):
@@ -266,36 +280,12 @@ class NpmRecipeHandler(RecipeHandler):
266 fetcher.unpack(srctree) 280 fetcher.unpack(srctree)
267 281
268 bb.note("Handling licences ...") 282 bb.note("Handling licences ...")
269 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 283 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
270 284 licvalues = match_licenses(licfiles, srctree, d)
271 def _guess_odd_license(licfiles): 285 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
272 import bb 286 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
273 287 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
274 md5sums = get_license_md5sums(d, linenumbers=True) 288 extravalues["LICENSE"] = fallback_licenses_flat
275
276 chksums = []
277 licenses = []
278 for licfile in licfiles:
279 f = os.path.join(srctree, licfile)
280 md5value = bb.utils.md5_file(f)
281 (license, beginline, endline, md5) = md5sums.get(md5value,
282 (None, "", "", ""))
283 if not license:
284 license = "Unknown"
285 logger.info("Please add the following line for '%s' to a "
286 "'lib/recipetool/licenses.csv' and replace `Unknown`, "
287 "`X`, `Y` and `MD5` with the license, begin line, "
288 "end line and partial MD5 checksum:\n" \
289 "%s,Unknown,X,Y,MD5" % (licfile, md5value))
290 chksums.append("file://%s%s%s;md5=%s" % (licfile,
291 ";beginline=%s" % (beginline) if beginline else "",
292 ";endline=%s" % (endline) if endline else "",
293 md5 if md5 else md5value))
294 licenses.append((license, licfile, md5value))
295 return (licenses, chksums)
296
297 (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
298 split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
299 289
300 classes.append("npm") 290 classes.append("npm")
301 handled.append("buildsystem") 291 handled.append("buildsystem")
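The package.json fallback above flattens an SPDX license expression by dropping operators and parentheses, e.g.:

    "license": "(MIT OR Apache-2.0)"  ->  ["MIT", "Apache-2.0"]
    "license": "ISC"                  ->  ["ISC"]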
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
deleted file mode 100644
index 80851111b3..0000000000
--- a/scripts/lib/recipetool/licenses.csv
+++ /dev/null
@@ -1,37 +0,0 @@
10636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
318810669f13b87348459e611d31ab760,GPL-2.0-only
4252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
52d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
63214f080875748938ba060314b4f727d,LGPL-2.0-only
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
8393a5ca445f6965873eca0259a17f833,GPL-2.0-only
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
114325afd396febcb659c36b49533135d4,GPL-2.0-only
124fbd65380cdd255951079008b364516c,LGPL-2.1-only
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
1559530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
165f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
176a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
18751419260aa954499f7abaabaa882bbe,GPL-2.0-only
197fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
208ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
2194d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
229ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
239f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
24a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
25b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
26bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
28c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
29d32239bcb673463ab874e80d47fae504,GPL-3.0-only
30d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
31d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
32db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
33eb723b61539feef013de476e68b5c50a,GPL-2.0-only
34ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
35f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
36fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
37fbc093901857fcd118f065f900982c24,LGPL-2.1-only
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py
new file mode 100644
index 0000000000..c7a53dc550
--- /dev/null
+++ b/scripts/lib/resulttool/junit.py
@@ -0,0 +1,77 @@
1# resulttool - report test results in JUnit XML format
2#
3# Copyright (c) 2024, Siemens AG.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import xml.etree.ElementTree as ET
11import resulttool.resultutils as resultutils
12
13def junit(args, logger):
14 testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map)
15
16 total_time = 0
17 skipped = 0
18 failures = 0
19 errors = 0
20
21 for tests in testresults.values():
22 results = tests[next(reversed(tests))].get("result", {})
23
24 for result_id, result in results.items():
25 # count only real test cases (this filters out ptestresult.rawlogs and ptestresult.sections)
26 if re.search(r'\.test_', result_id):
27 total_time += result.get("duration", 0)
28
29 if result['status'] == "FAILED":
30 failures += 1
31 elif result['status'] == "ERROR":
32 errors += 1
33 elif result['status'] == "SKIPPED":
34 skipped += 1
35
36 testsuites_node = ET.Element("testsuites")
37 testsuites_node.set("time", "%s" % total_time)
38 testsuite_node = ET.SubElement(testsuites_node, "testsuite")
39 testsuite_node.set("name", "Testimage")
40 testsuite_node.set("time", "%s" % total_time)
41 testsuite_node.set("tests", "%s" % len(results))
42 testsuite_node.set("failures", "%s" % failures)
43 testsuite_node.set("errors", "%s" % errors)
44 testsuite_node.set("skipped", "%s" % skipped)
45
46 for result_id, result in results.items():
47 if re.search(r'\.test_', result_id):
48 testcase_node = ET.SubElement(testsuite_node, "testcase", {
49 "name": result_id,
50 "classname": "Testimage",
51 "time": str(result['duration'])
52 })
53 if result['status'] == "SKIPPED":
54 ET.SubElement(testcase_node, "skipped", message=result['log'])
55 elif result['status'] == "FAILED":
56 ET.SubElement(testcase_node, "failure", message=result['log'])
57 elif result['status'] == "ERROR":
58 ET.SubElement(testcase_node, "error", message=result['log'])
59
60 tree = ET.ElementTree(testsuites_node)
61
62 if args.junit_xml_path is None:
63 args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml'
64 tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True)
65
66 logger.info('Saved JUnit XML report as %s' % args.junit_xml_path)
67
68def register_commands(subparsers):
69 """Register subcommands from this plugin"""
70 parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format',
71 description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.',
72 group='analysis')
73 parser_build.set_defaults(func=junit)
74 parser_build.add_argument('json_file',
75 help='json file should point to the testresults.json')
76 parser_build.add_argument('-j', '--junit_xml_path',
77 help='path to write the generated JUnit XML report to. The default location is <build_dir>/tmp/log/oeqa/junit.xml')
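Typical invocations of the new subcommand (paths illustrative):

    $ resulttool junit testresults.json
    $ resulttool junit testresults.json -j /tmp/junit.xml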
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py
index ecb27c5933..ae0861ac6b 100755
--- a/scripts/lib/resulttool/manualexecution.py
+++ b/scripts/lib/resulttool/manualexecution.py
@@ -22,7 +22,7 @@ def load_json_file(f):
22def write_json_file(f, json_data): 22def write_json_file(f, json_data):
23 os.makedirs(os.path.dirname(f), exist_ok=True) 23 os.makedirs(os.path.dirname(f), exist_ok=True)
24 with open(f, 'w') as filedata: 24 with open(f, 'w') as filedata:
25 filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) 25 filedata.write(json.dumps(json_data, sort_keys=True, indent=1))
26 26
27class ManualTestRunner(object): 27class ManualTestRunner(object):
28 28
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py
index 10e7d13841..33b3119c54 100644
--- a/scripts/lib/resulttool/regression.py
+++ b/scripts/lib/resulttool/regression.py
@@ -212,6 +212,8 @@ def compare_result(logger, base_name, target_name, base_result, target_result, d
212 212
213 if base_result and target_result: 213 if base_result and target_result:
214 for k in base_result: 214 for k in base_result:
215 if k in ['ptestresult.rawlogs', 'ptestresult.sections']:
216 continue
215 base_testcase = base_result[k] 217 base_testcase = base_result[k]
216 base_status = base_testcase.get('status') 218 base_status = base_testcase.get('status')
217 if base_status: 219 if base_status:
@@ -422,6 +424,7 @@ def register_commands(subparsers):
422 help='(optional) filter the base results to this result ID') 424 help='(optional) filter the base results to this result ID')
423 parser_build.add_argument('-t', '--target-result-id', default='', 425 parser_build.add_argument('-t', '--target-result-id', default='',
424 help='(optional) filter the target results to this result ID') 426 help='(optional) filter the target results to this result ID')
427 parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
425 428
426 parser_build = subparsers.add_parser('regression-git', help='regression git analysis', 429 parser_build = subparsers.add_parser('regression-git', help='regression git analysis',
427 description='regression analysis comparing base result set to target ' 430 description='regression analysis comparing base result set to target '
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index a349510ab8..1c100b00ab 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -256,7 +256,7 @@ class ResultsTextReport(object):
256 if selected_test_case_only: 256 if selected_test_case_only:
257 print_selected_testcase_result(raw_results, selected_test_case_only) 257 print_selected_testcase_result(raw_results, selected_test_case_only)
258 else: 258 else:
259 print(json.dumps(raw_results, sort_keys=True, indent=4)) 259 print(json.dumps(raw_results, sort_keys=True, indent=1))
260 else: 260 else:
261 print('Could not find raw test result for %s' % raw_test) 261 print('Could not find raw test result for %s' % raw_test)
262 return 0 262 return 0
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
index c5521d81bd..b8fc79a6ac 100644
--- a/scripts/lib/resulttool/resultutils.py
+++ b/scripts/lib/resulttool/resultutils.py
@@ -14,8 +14,11 @@ import scriptpath
14import copy 14import copy
15import urllib.request 15import urllib.request
16import posixpath 16import posixpath
17import logging
17scriptpath.add_oe_lib_path() 18scriptpath.add_oe_lib_path()
18 19
20logger = logging.getLogger('resulttool')
21
19flatten_map = { 22flatten_map = {
20 "oeselftest": [], 23 "oeselftest": [],
21 "runtime": [], 24 "runtime": [],
@@ -31,13 +34,19 @@ regression_map = {
31 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] 34 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
32} 35}
33store_map = { 36store_map = {
34 "oeselftest": ['TEST_TYPE'], 37 "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'],
35 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], 38 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
36 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], 39 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
37 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], 40 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
38 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] 41 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
39} 42}
40 43
44rawlog_sections = {
45 "ptestresult.rawlogs": "ptest",
46 "ltpresult.rawlogs": "ltp",
47 "ltpposixresult.rawlogs": "ltpposix"
48}
49
41def is_url(p): 50def is_url(p):
42 """ 51 """
43 Helper for determining if the given path is a URL 52 Helper for determining if the given path is a URL
@@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid):
108 newresults[r][i] = results[r][i] 117 newresults[r][i] = results[r][i]
109 return newresults 118 return newresults
110 119
111def strip_ptestresults(results): 120def strip_logs(results):
112 newresults = copy.deepcopy(results) 121 newresults = copy.deepcopy(results)
113 #for a in newresults2:
114 # newresults = newresults2[a]
115 for res in newresults: 122 for res in newresults:
116 if 'result' not in newresults[res]: 123 if 'result' not in newresults[res]:
117 continue 124 continue
118 if 'ptestresult.rawlogs' in newresults[res]['result']: 125 for logtype in rawlog_sections:
119 del newresults[res]['result']['ptestresult.rawlogs'] 126 if logtype in newresults[res]['result']:
127 del newresults[res]['result'][logtype]
120 if 'ptestresult.sections' in newresults[res]['result']: 128 if 'ptestresult.sections' in newresults[res]['result']:
121 for i in newresults[res]['result']['ptestresult.sections']: 129 for i in newresults[res]['result']['ptestresult.sections']:
122 if 'log' in newresults[res]['result']['ptestresult.sections'][i]: 130 if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
123 del newresults[res]['result']['ptestresult.sections'][i]['log'] 131 del newresults[res]['result']['ptestresult.sections'][i]['log']
124 return newresults 132 return newresults
125 133
134# For timing numbers, excessive precision doesn't make sense and just clutters
135# the logs. Trim numbers over 1 to 3 decimal places and numbers less than 1
136# to 4 significant digits
137def trim_durations(results):
138 for res in results:
139 if 'result' not in results[res]:
140 continue
141 for entry in results[res]['result']:
142 if 'duration' in results[res]['result'][entry]:
143 duration = results[res]['result'][entry]['duration']
144 if duration > 1:
145 results[res]['result'][entry]['duration'] = float("%.3f" % duration)
146 elif duration < 1:
147 results[res]['result'][entry]['duration'] = float("%.4g" % duration)
148 return results
149
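The trimming rules in trim_durations() work out as follows:

    float("%.3f" % 12.3456789)   ->  12.346     # above 1: three decimal places
    float("%.4g" % 0.000123456)  ->  0.0001235  # below 1: four significant digits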
150def handle_cleanups(results):
151 # Remove pointless path duplication from old format reproducibility results
152 for res2 in results:
153 try:
154 section = results[res2]['result']['reproducible']['files']
155 for pkgtype in section:
156 for filelist in section[pkgtype].copy():
157 if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict:
158 newlist = []
159 for entry in section[pkgtype][filelist]:
160 newlist.append(entry["reference"].split("/./")[1])
161 section[pkgtype][filelist] = newlist
162
163 except KeyError:
164 pass
165 # Remove pointless duplicate rawlogs data
166 try:
167 del results[res2]['result']['reproducible.rawlogs']
168 except KeyError:
169 pass
170
126def decode_log(logdata): 171def decode_log(logdata):
127 if isinstance(logdata, str): 172 if isinstance(logdata, str):
128 return logdata 173 return logdata
@@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results):
155 return None 200 return None
156 return decode_log(results[sectname]['log']) 201 return decode_log(results[sectname]['log'])
157 202
158def ptestresult_get_rawlogs(results):
159 return generic_get_rawlogs('ptestresult.rawlogs', results)
160
161def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): 203def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
162 for res in results: 204 for res in results:
163 if res: 205 if res:
@@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
167 os.makedirs(os.path.dirname(dst), exist_ok=True) 209 os.makedirs(os.path.dirname(dst), exist_ok=True)
168 resultsout = results[res] 210 resultsout = results[res]
169 if not ptestjson: 211 if not ptestjson:
170 resultsout = strip_ptestresults(results[res]) 212 resultsout = strip_logs(results[res])
213 trim_durations(resultsout)
214 handle_cleanups(resultsout)
171 with open(dst, 'w') as f: 215 with open(dst, 'w') as f:
172 f.write(json.dumps(resultsout, sort_keys=True, indent=4)) 216 f.write(json.dumps(resultsout, sort_keys=True, indent=1))
173 for res2 in results[res]: 217 for res2 in results[res]:
174 if ptestlogs and 'result' in results[res][res2]: 218 if ptestlogs and 'result' in results[res][res2]:
175 seriesresults = results[res][res2]['result'] 219 seriesresults = results[res][res2]['result']
176 rawlogs = ptestresult_get_rawlogs(seriesresults) 220 for logtype in rawlog_sections:
177 if rawlogs is not None: 221 logdata = generic_get_rawlogs(logtype, seriesresults)
178 with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: 222 if logdata is not None:
179 f.write(rawlogs) 223 logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log")
224 with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f:
225 f.write(logdata)
180 if 'ptestresult.sections' in seriesresults: 226 if 'ptestresult.sections' in seriesresults:
181 for i in seriesresults['ptestresult.sections']: 227 for i in seriesresults['ptestresult.sections']:
182 sectionlog = ptestresult_get_log(seriesresults, i) 228 sectionlog = ptestresult_get_log(seriesresults, i)
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py
index e0951f0a8f..b143334e69 100644
--- a/scripts/lib/resulttool/store.py
+++ b/scripts/lib/resulttool/store.py
@@ -65,18 +65,35 @@ def store(args, logger):
65 65
66 for r in revisions: 66 for r in revisions:
67 results = revisions[r] 67 results = revisions[r]
68 if args.revision and r[0] != args.revision:
69 logger.info('skipping %s as non-matching' % r[0])
70 continue
68 keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} 71 keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
69 subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"]) 72 subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"])
70 resultutils.save_resultsdata(results, tempdir, ptestlogs=True) 73 resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
71 74
72 logger.info('Storing test result into git repository %s' % args.git_dir) 75 logger.info('Storing test result into git repository %s' % args.git_dir)
73 76
74 gitarchive.gitarchive(tempdir, args.git_dir, False, False, 77 excludes = []
78 if args.logfile_archive:
79 excludes = ['*.log', "*.log.zst"]
80
81 tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False,
75 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", 82 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
76 False, "{branch}/{commit_count}-g{commit}/{tag_number}", 83 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
77 'Test run #{tag_number} of {branch}:{commit}', '', 84 'Test run #{tag_number} of {branch}:{commit}', '',
78 [], [], False, keywords, logger) 85 excludes, [], False, keywords, logger)
79 86
87 if args.logfile_archive:
88 logdir = args.logfile_archive + "/" + tagname
89 shutil.copytree(tempdir, logdir)
90 os.chmod(logdir, 0o755)
91 for root, dirs, files in os.walk(logdir):
92 for name in files:
93 if not name.endswith(".log"):
94 continue
95 f = os.path.join(root, name)
96 subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True)
80 finally: 97 finally:
81 subprocess.check_call(["rm", "-rf", tempdir]) 98 subprocess.check_call(["rm", "-rf", tempdir])
82 99
@@ -102,3 +119,7 @@ def register_commands(subparsers):
102 help='add executed-by configuration to each result file') 119 help='add executed-by configuration to each result file')
103 parser_build.add_argument('-t', '--extra-test-env', default='', 120 parser_build.add_argument('-t', '--extra-test-env', default='',
104 help='add extra test environment data to each result file configuration') 121 help='add extra test environment data to each result file configuration')
122 parser_build.add_argument('-r', '--revision', default='',
123 help='only store data for the specified revision')
124 parser_build.add_argument('-l', '--logfile-archive', default='',
125 help='directory to separately archive log files along with a copy of the results')
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index f23e53cba9..32e749dbb1 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -179,8 +179,13 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
179 f.write('SRCREV = "%s"\n' % srcrev) 179 f.write('SRCREV = "%s"\n' % srcrev)
180 f.write('PV = "0.0+"\n') 180 f.write('PV = "0.0+"\n')
181 f.write('WORKDIR = "%s"\n' % tmpworkdir) 181 f.write('WORKDIR = "%s"\n' % tmpworkdir)
182 f.write('UNPACKDIR = "%s"\n' % destdir)
183
182 # Set S out of the way so it doesn't get created under the workdir 184 # Set S out of the way so it doesn't get created under the workdir
183 f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) 185 s_dir = os.path.join(tmpdir, 'emptysrc')
186 bb.utils.mkdirhier(s_dir)
187 f.write('S = "%s"\n' % s_dir)
188
184 if not mirrors: 189 if not mirrors:
185 # We do not need PREMIRRORS since we are almost certainly 190 # We do not need PREMIRRORS since we are almost certainly
186 # fetching new source rather than something that has already 191 # fetching new source rather than something that has already
@@ -232,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
232 if e.errno != errno.ENOTEMPTY: 237 if e.errno != errno.ENOTEMPTY:
233 raise 238 raise
234 239
235 bb.utils.mkdirhier(destdir)
236 for fn in os.listdir(tmpworkdir):
237 shutil.move(os.path.join(tmpworkdir, fn), destdir)
238
239 finally: 240 finally:
240 if not preserve_tmp: 241 if not preserve_tmp:
241 shutil.rmtree(tmpdir) 242 shutil.rmtree(tmpdir)
@@ -271,12 +272,3 @@ def is_src_url(param):
271 return True 272 return True
272 return False 273 return False
273 274
274def filter_src_subdirs(pth):
275 """
276 Filter out subdirectories of initial unpacked source trees that we do not care about.
277 Used by devtool and recipetool.
278 """
279 dirlist = os.listdir(pth)
280 filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa']
281 dirlist = [x for x in dirlist if x not in filterout]
282 return dirlist
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc
index 89880b417b..4a440ddafe 100644
--- a/scripts/lib/wic/canned-wks/common.wks.inc
+++ b/scripts/lib/wic/canned-wks/common.wks.inc
@@ -1,3 +1,3 @@
1# This file is included into 3 canned wks files from this directory 1# This file is included into 3 canned wks files from this directory
2part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 2part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
3part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 3part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
index 8d7d8de6ea..cb640056f1 100644
--- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
@@ -3,7 +3,7 @@
3# can directly dd to boot media. 3# can directly dd to boot media.
4 4
5 5
6part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 6part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid 7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8 8
9bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" 9bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
index f61d941d6d..4fd1999ffb 100644
--- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
@@ -15,7 +15,7 @@
15# 15#
16# - or any combinations of -r and --rootfs command line options 16# - or any combinations of -r and --rootfs command line options
17 17
18part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 18part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
19part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 19part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024
20part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 20part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024
21 21
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
index 2fd286ff98..5211972955 100644
--- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
@@ -1,3 +1,3 @@
1bootloader --ptable gpt 1bootloader --ptable gpt
2part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1 2part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2
3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ 3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
new file mode 100644
index 0000000000..cac0fa32cd
--- /dev/null
+++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
@@ -0,0 +1,3 @@
1bootloader --ptable gpt --timeout=5
2part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1
3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks
index 9f534fe184..16dfe76dfe 100644
--- a/scripts/lib/wic/canned-wks/mkefidisk.wks
+++ b/scripts/lib/wic/canned-wks/mkefidisk.wks
@@ -2,10 +2,10 @@
2# long-description: Creates a partitioned EFI disk image that the user 2# long-description: Creates a partitioned EFI disk image that the user
3# can directly dd to boot media. 3# can directly dd to boot media.
4 4
5part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 5part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
6 6
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid 7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8 8
9part swap --ondisk sda --size 44 --label swap1 --fstype=swap 9part swap --ondisk sda --size 44 --label swap1 --fstype=swap
10 10
11bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" 11bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0"
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks
index 48c5ac4791..c3a030e5b4 100644
--- a/scripts/lib/wic/canned-wks/mkhybridiso.wks
+++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks
@@ -2,6 +2,6 @@
2# long-description: Creates an EFI and legacy bootable hybrid ISO image 2# long-description: Creates an EFI and legacy bootable hybrid ISO image
3# which can be used on optical media as well as USB media. 3# which can be used on optical media as well as USB media.
4 4
5part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO 5part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO
6 6
7bootloader --timeout=15 --append="" 7bootloader --timeout=15 --append=""
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
index 63bc4dab6a..f9f8044f7d 100644
--- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
+++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
@@ -2,5 +2,5 @@
2# long-description: Creates a partitioned SD card image. Boot files 2# long-description: Creates a partitioned SD card image. Boot files
3# are located in the first vfat partition. 3# are located in the first vfat partition.
4 4
5part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 5part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16
6part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 6part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
index 95d7b97a60..3fb2c0e35f 100644
--- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
+++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
@@ -2,7 +2,7 @@
2# long-description: Creates a partitioned EFI disk image that the user 2# long-description: Creates a partitioned EFI disk image that the user
3# can directly dd to boot media. The selected bootloader is systemd-boot. 3# can directly dd to boot media. The selected bootloader is systemd-boot.
4 4
5part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid 5part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid
6 6
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid 7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8 8
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py
index 674ccfc244..b9e60cbe4e 100644
--- a/scripts/lib/wic/engine.py
+++ b/scripts/lib/wic/engine.py
@@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir,
180 os.makedirs(options.outdir) 180 os.makedirs(options.outdir)
181 181
182 pname = options.imager 182 pname = options.imager
183 # Don't support '-' in plugin names
184 pname = pname.replace("-", "_")
183 plugin_class = PluginMgr.get_plugins('imager').get(pname) 185 plugin_class = PluginMgr.get_plugins('imager').get(pname)
184 if not plugin_class: 186 if not plugin_class:
185 raise WicError('Unknown plugin: %s' % pname) 187 raise WicError('Unknown plugin: %s' % pname)
@@ -232,6 +234,16 @@ class Disk:
232 self._psector_size = None 234 self._psector_size = None
233 self._ptable_format = None 235 self._ptable_format = None
234 236
237 # define sector size
238 sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
239 if sector_size_str is not None:
240 try:
241 self.sector_size = int(sector_size_str)
242 except ValueError:
243 self.sector_size = None
244 else:
245 self.sector_size = None
246
235 # find parted 247 # find parted
236 # read paths from $PATH environment variable 248 # read paths from $PATH environment variable
237 # if it fails, use hardcoded paths 249 # if it fails, use hardcoded paths
@@ -258,7 +270,13 @@ class Disk:
258 def get_partitions(self): 270 def get_partitions(self):
259 if self._partitions is None: 271 if self._partitions is None:
260 self._partitions = OrderedDict() 272 self._partitions = OrderedDict()
261 out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) 273
274 if self.sector_size is not None:
275 out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \
276 (self.sector_size, self.parted, self.imagepath), True)
277 else:
278 out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath))
279
262 parttype = namedtuple("Part", "pnum start end size fstype") 280 parttype = namedtuple("Part", "pnum start end size fstype")
263 splitted = out.splitlines() 281 splitted = out.splitlines()
264 # skip over possible errors in exec_cmd output 282 # skip over possible errors in exec_cmd output
@@ -359,7 +377,7 @@ class Disk:
359 Remove files/dirs and their contents from the partition. 377 Remove files/dirs and their contents from the partition.
360 This only applies to ext* partition. 378 This only applies to ext* partition.
361 """ 379 """
362 abs_path = re.sub('\/\/+', '/', path) 380 abs_path = re.sub(r'\/\/+', '/', path)
363 cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, 381 cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs,
364 self._get_part_image(pnum), 382 self._get_part_image(pnum),
365 abs_path) 383 abs_path)
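When WIC_SECTOR_SIZE is set, the parted call in get_partitions() is equivalent to running (image path illustrative):

    $ PARTED_SECTOR_SIZE=4096 parted -sm image.wic unit B print

so parted interprets the image with the configured logical sector size instead of the 512-byte default.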
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
index 163535e431..2e3061f343 100644
--- a/scripts/lib/wic/help.py
+++ b/scripts/lib/wic/help.py
@@ -544,18 +544,18 @@ DESCRIPTION
544 the --source param given to that partition. For example, if the 544 the --source param given to that partition. For example, if the
545 partition is set up like this: 545 partition is set up like this:
546 546
547 part /boot --source bootimg-pcbios ... 547 part /boot --source bootimg_pcbios ...
548 548
549 then the methods defined as class members of the plugin having the 549 then the methods defined as class members of the plugin having the
550 matching bootimg-pcbios .name class member would be used. 550 matching bootimg_pcbios .name class member would be used.
551 551
552 To be more concrete, here's the plugin definition that would match 552 To be more concrete, here's the plugin definition that would match
553 a '--source bootimg-pcbios' usage, along with an example method 553 a '--source bootimg_pcbios' usage, along with an example method
554 that would be called by the wic implementation when it needed to 554 that would be called by the wic implementation when it needed to
555 invoke an implementation-specific partition-preparation function: 555 invoke an implementation-specific partition-preparation function:
556 556
557 class BootimgPcbiosPlugin(SourcePlugin): 557 class BootimgPcbiosPlugin(SourcePlugin):
558 name = 'bootimg-pcbios' 558 name = 'bootimg_pcbios'
559 559
560 @classmethod 560 @classmethod
561 def do_prepare_partition(self, part, ...) 561 def do_prepare_partition(self, part, ...)
@@ -794,7 +794,7 @@ DESCRIPTION
794 794
795 Here is a content of test.wks: 795 Here is a content of test.wks:
796 796
797 part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 797 part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
798 part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 798 part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024
799 799
800 bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" 800 bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0"
@@ -916,6 +916,10 @@ DESCRIPTION
916 will create an empty partition. The --size parameter has 916 will create an empty partition. The --size parameter has
917 to be used to specify the size of the empty partition. 917 to be used to specify the size of the empty partition.
918 918
919 --sourceparams: This option is specific to wic. Supply additional
920 parameters to the source plugin in
921 key1=value1,key2 format.
922
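An illustrative use of --sourceparams as described above (the loader and title values follow the bootimg_efi examples elsewhere in this help text; note that a key may be given without a value):

    part /boot --source bootimg_efi --sourceparams="loader=systemd-boot,title=boot" --active --align 1024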
919 --ondisk or --ondrive: Forces the partition to be created on 923 --ondisk or --ondrive: Forces the partition to be created on
920 a particular disk. 924 a particular disk.
921 925
@@ -932,6 +936,7 @@ DESCRIPTION
932 squashfs 936 squashfs
933 erofs 937 erofs
934 swap 938 swap
939 none
935 940
936 --fsoptions: Specifies a free-form string of options to be 941 --fsoptions: Specifies a free-form string of options to be
937 used when mounting the filesystem. This string 942 used when mounting the filesystem. This string
@@ -965,6 +970,14 @@ DESCRIPTION
965 to start a partition on an x KBytes 970 to start a partition on an x KBytes
966 boundary. 971 boundary.
967 972
973 --offset: This option is specific to wic. It places the partition
974 at exactly the specified offset. If the partition cannot be
975 placed at the specified offset, the image build will fail.
976 Specify as an integer value optionally followed by one of the
977 units s/S for 512 byte sector, k/K for kibibyte, M for
978 mebibyte and G for gibibyte. The default unit if none is
979 given is k.
980
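For example (sizes and sources here are illustrative only):

    part /boot --source bootimg_pcbios --offset 1024    # 1024 KiB, 'k' being the default unit
    part / --source rootfs --fstype=ext4 --offset 512M  # fails the build if 512 MiB cannot be honored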
968 --no-table: This option is specific to wic. Space will be 981 --no-table: This option is specific to wic. Space will be
969 reserved for the partition and it will be 982 reserved for the partition and it will be
970 populated but it will not be added to the 983 populated but it will not be added to the
@@ -1045,6 +1058,18 @@ DESCRIPTION
1045 not take effect when --mkfs-extraopts is used. This should be taken into 1058 not take effect when --mkfs-extraopts is used. This should be taken into
1046 account when using --mkfs-extraopts. 1059 account when using --mkfs-extraopts.
1047 1060
1061 --type: This option is specific to wic. Valid values are 'primary'
1062 and 'logical'. For msdos partition tables, this option specifies
1063 the partition type.
1064
1065 --hidden: This option is specific to wic. This option sets the
1066 RequiredPartition bit (bit 0) on GPT partitions.
1067
1068 --mbr: This option is specific to wic. It is used with the
1069 'gpt-hybrid' partition table format, which combines a GPT
1070 partition table with an MBR header. Partitions with this
1071 flag will also be included in that MBR header.
1072
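Illustrative one-line uses of the three options documented above (partition layouts are hypothetical):

    part /boot --source bootimg_pcbios --type primary --active ...   # msdos table
    part /boot --source bootimg_pcbios --mbr --active ...            # gpt-hybrid table
    part /efi --fstype=vfat --hidden ...                             # gpt table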
1048 * bootloader 1073 * bootloader
1049 1074
1050 This command allows the user to specify various bootloader 1075 This command allows the user to specify various bootloader
@@ -1063,6 +1088,13 @@ DESCRIPTION
1063 file. Using this option will override any other 1088 file. Using this option will override any other
1064 bootloader option. 1089 bootloader option.
1065 1090
1091 --ptable: Specifies the partition table format. Valid values are
1092 'msdos', 'gpt', 'gpt-hybrid'.
1093
1094 --source: Specifies the source plugin. If not specified, the
1095 --source value will be copied from the partition that has
1096 /boot as mountpoint.
1097
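A bootloader line exercising the new options might read (values illustrative):

    bootloader --ptable gpt-hybrid --timeout=5 --append="rootwait console=ttyS0,115200"

With --source omitted, the plugin is taken from the partition mounted at /boot, as described above.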
1066 Note that bootloader functionality and boot partitions are 1098 Note that bootloader functionality and boot partitions are
1067 implemented by the various --source plugins that implement 1099 implemented by the various --source plugins that implement
1068 bootloader functionality; the bootloader command essentially 1100 bootloader functionality; the bootloader command essentially
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
index 795707ec5d..b34691d313 100644
--- a/scripts/lib/wic/partition.py
+++ b/scripts/lib/wic/partition.py
@@ -164,6 +164,9 @@ class Partition():
164 164
165 plugins = PluginMgr.get_plugins('source') 165 plugins = PluginMgr.get_plugins('source')
166 166
167 # Don't support '-' in plugin names
168 self.source = self.source.replace("-", "_")
169
167 if self.source not in plugins: 170 if self.source not in plugins:
168 raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" 171 raise WicError("The '%s' --source specified for %s doesn't exist.\n\t"
169 "See 'wic list source-plugins' for a list of available" 172 "See 'wic list source-plugins' for a list of available"
@@ -178,7 +181,7 @@ class Partition():
178 splitted = self.sourceparams.split(',') 181 splitted = self.sourceparams.split(',')
179 srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par) 182 srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
180 183
181 plugin = PluginMgr.get_plugins('source')[self.source] 184 plugin = plugins[self.source]
182 plugin.do_configure_partition(self, srcparams_dict, creator, 185 plugin.do_configure_partition(self, srcparams_dict, creator,
183 cr_workdir, oe_builddir, bootimg_dir, 186 cr_workdir, oe_builddir, bootimg_dir,
184 kernel_dir, native_sysroot) 187 kernel_dir, native_sysroot)
@@ -222,19 +225,19 @@ class Partition():
222 if (pseudo_dir): 225 if (pseudo_dir):
223 # Canonicalize the ignore paths. This corresponds to 226 # Canonicalize the ignore paths. This corresponds to
224 # calling oe.path.canonicalize(), which is used in bitbake.conf. 227 # calling oe.path.canonicalize(), which is used in bitbake.conf.
225 ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",") 228 include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",")
226 canonical_paths = [] 229 canonical_paths = []
227 for path in ignore_paths: 230 for path in include_paths:
228 if "$" not in path: 231 if "$" not in path:
229 trailing_slash = path.endswith("/") and "/" or "" 232 trailing_slash = path.endswith("/") and "/" or ""
230 canonical_paths.append(os.path.realpath(path) + trailing_slash) 233 canonical_paths.append(os.path.realpath(path) + trailing_slash)
231 ignore_paths = ",".join(canonical_paths) 234 include_paths = ",".join(canonical_paths)
232 235
233 pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix 236 pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
234 pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir 237 pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
235 pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir 238 pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir
236 pseudo += "export PSEUDO_NOSYMLINKEXP=1;" 239 pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
237 pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths 240 pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths
238 pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") 241 pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
239 else: 242 else:
240 pseudo = None 243 pseudo = None
@@ -244,7 +247,7 @@ class Partition():
244 # from bitbake variable 247 # from bitbake variable
245 rsize_bb = get_bitbake_var('ROOTFS_SIZE') 248 rsize_bb = get_bitbake_var('ROOTFS_SIZE')
246 rdir = get_bitbake_var('IMAGE_ROOTFS') 249 rdir = get_bitbake_var('IMAGE_ROOTFS')
247 if rsize_bb and rdir == rootfs_dir: 250 if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")):
248 # Bitbake variable ROOTFS_SIZE is calculated in 251 # Bitbake variable ROOTFS_SIZE is calculated in
249 # Image._get_rootfs_size method from meta/lib/oe/image.py 252 # Image._get_rootfs_size method from meta/lib/oe/image.py
250 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, 253 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
@@ -284,19 +287,8 @@ class Partition():
284 287
285 extraopts = self.mkfs_extraopts or "-F -i 8192" 288 extraopts = self.mkfs_extraopts or "-F -i 8192"
286 289
287 if os.getenv('SOURCE_DATE_EPOCH'): 290 # use hash_seed to generate reproducible ext4 images
288 sde_time = int(os.getenv('SOURCE_DATE_EPOCH')) 291 (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo)
289 if pseudo:
290 pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
291 else:
292 pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
293
294 # Set hash_seed to generate deterministic directory indexes
295 namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
296 if self.fsuuid:
297 namespace = uuid.UUID(self.fsuuid)
298 hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
299 extraopts += " -E hash_seed=%s" % hash_seed
300 292
301 label_str = "" 293 label_str = ""
302 if self.label: 294 if self.label:
@@ -344,6 +336,23 @@ class Partition():
344 336
345 self.check_for_Y2038_problem(rootfs, native_sysroot) 337 self.check_for_Y2038_problem(rootfs, native_sysroot)
346 338
339 def get_hash_seed_ext4(self, extraopts, pseudo):
340 if os.getenv('SOURCE_DATE_EPOCH'):
341 sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
342 if pseudo:
343 pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
344 else:
345 pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
346
347 # Set hash_seed to generate deterministic directory indexes
348 namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
349 if self.fsuuid:
350 namespace = uuid.UUID(self.fsuuid)
351 hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
352 extraopts += " -E hash_seed=%s" % hash_seed
353
354 return (extraopts, pseudo)
355
347 def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, 356 def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
348 native_sysroot, pseudo): 357 native_sysroot, pseudo):
349 """ 358 """
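The derivation inside get_hash_seed_ext4() can be read in isolation; a minimal sketch, with the namespace UUID copied from the hunk above and error handling elided:

    import os
    import uuid

    def reproducible_hash_seed(fsuuid=None):
        sde = os.getenv("SOURCE_DATE_EPOCH")
        if sde is None:
            return None  # no reproducible epoch, let mkfs pick a random seed
        # same namespace + same epoch => same seed => deterministic
        # ext4 directory indexes across rebuilds
        namespace = uuid.UUID(fsuuid) if fsuuid else \
                    uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
        return str(uuid.uuid5(namespace, str(int(sde))))

The seed is then passed to mkfs via '-E hash_seed=<uuid>' as shown above.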
@@ -437,13 +446,16 @@ class Partition():
437 446
438 extraopts = self.mkfs_extraopts or "-i 8192" 447 extraopts = self.mkfs_extraopts or "-i 8192"
439 448
449 # use hash_seed to generate reproducible ext4 images
450 (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None)
451
440 label_str = "" 452 label_str = ""
441 if self.label: 453 if self.label:
442 label_str = "-L %s" % self.label 454 label_str = "-L %s" % self.label
443 455
444 mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ 456 mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \
445 (self.fstype, extraopts, label_str, self.fsuuid, rootfs) 457 (self.fstype, extraopts, label_str, self.fsuuid, rootfs)
446 exec_native_cmd(mkfs_cmd, native_sysroot) 458 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
447 459
448 self.check_for_Y2038_problem(rootfs, native_sysroot) 460 self.check_for_Y2038_problem(rootfs, native_sysroot)
449 461
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
index b64568339b..640da292d3 100644
--- a/scripts/lib/wic/pluginbase.py
+++ b/scripts/lib/wic/pluginbase.py
@@ -44,7 +44,7 @@ class PluginMgr:
44 path = os.path.join(layer_path, script_plugin_dir) 44 path = os.path.join(layer_path, script_plugin_dir)
45 path = os.path.abspath(os.path.expanduser(path)) 45 path = os.path.abspath(os.path.expanduser(path))
46 if path not in cls._plugin_dirs and os.path.isdir(path): 46 if path not in cls._plugin_dirs and os.path.isdir(path):
47 cls._plugin_dirs.insert(0, path) 47 cls._plugin_dirs.append(path)
48 48
49 if ptype not in PLUGINS: 49 if ptype not in PLUGINS:
50 # load all ptype plugins 50 # load all ptype plugins
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
index a1d152659b..6e1f1c8cba 100644
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ b/scripts/lib/wic/plugins/imager/direct.py
@@ -203,6 +203,8 @@ class DirectPlugin(ImagerPlugin):
203 source_plugin = self.ks.bootloader.source 203 source_plugin = self.ks.bootloader.source
204 disk_name = self.parts[0].disk 204 disk_name = self.parts[0].disk
205 if source_plugin: 205 if source_plugin:
206 # Don't support '-' in plugin names
207 source_plugin = source_plugin.replace("-", "_")
206 plugin = PluginMgr.get_plugins('source')[source_plugin] 208 plugin = PluginMgr.get_plugins('source')[source_plugin]
207 plugin.do_install_disk(self._image, disk_name, self, self.workdir, 209 plugin.do_install_disk(self._image, disk_name, self, self.workdir,
208 self.oe_builddir, self.bootimg_dir, 210 self.oe_builddir, self.bootimg_dir,
@@ -321,7 +323,15 @@ class PartitionedImage():
321 self.partitions = partitions 323 self.partitions = partitions
322 self.partimages = [] 324 self.partimages = []
323 # Size of a sector used in calculations 325 # Size of a sector used in calculations
324 self.sector_size = SECTOR_SIZE 326 sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
327 if sector_size_str is not None:
328 try:
329 self.sector_size = int(sector_size_str)
330 except ValueError:
331 self.sector_size = SECTOR_SIZE
332 else:
333 self.sector_size = SECTOR_SIZE
334
325 self.native_sysroot = native_sysroot 335 self.native_sysroot = native_sysroot
326 num_real_partitions = len([p for p in self.partitions if not p.no_table]) 336 num_real_partitions = len([p for p in self.partitions if not p.no_table])
327 self.extra_space = extra_space 337 self.extra_space = extra_space
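With this change the sector size used for partition math is taken from the build configuration when available; presumably something like the following in local.conf (4096 is an illustrative value for 4K-native media):

    WIC_SECTOR_SIZE = "4096"

Non-integer values silently fall back to the built-in SECTOR_SIZE default.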
@@ -508,7 +518,8 @@ class PartitionedImage():
508 logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", 518 logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors",
509 parttype, start, end, size) 519 parttype, start, end, size)
510 520
511 cmd = "parted -s %s unit s mkpart %s" % (device, parttype) 521 cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \
522 (self.sector_size, device, parttype)
512 if fstype: 523 if fstype:
513 cmd += " %s" % fstype 524 cmd += " %s" % fstype
514 cmd += " %d %d" % (start, end) 525 cmd += " %d %d" % (start, end)
@@ -527,8 +538,8 @@ class PartitionedImage():
527 os.ftruncate(sparse.fileno(), min_size) 538 os.ftruncate(sparse.fileno(), min_size)
528 539
529 logger.debug("Initializing partition table for %s", device) 540 logger.debug("Initializing partition table for %s", device)
530 exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format), 541 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" %
531 self.native_sysroot) 542 (self.sector_size, device, ptable_format), self.native_sysroot)
532 543
533 def _write_disk_guid(self): 544 def _write_disk_guid(self):
534 if self.ptable_format in ('gpt', 'gpt-hybrid'): 545 if self.ptable_format in ('gpt', 'gpt-hybrid'):
@@ -538,7 +549,8 @@ class PartitionedImage():
538 self.disk_guid = uuid.uuid4() 549 self.disk_guid = uuid.uuid4()
539 550
540 logger.debug("Set disk guid %s", self.disk_guid) 551 logger.debug("Set disk guid %s", self.disk_guid)
541 sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid) 552 sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \
553 (self.sector_size, self.path, self.disk_guid)
542 exec_native_cmd(sfdisk_cmd, self.native_sysroot) 554 exec_native_cmd(sfdisk_cmd, self.native_sysroot)
543 555
544 def create(self): 556 def create(self):
@@ -613,45 +625,44 @@ class PartitionedImage():
613 partition_label = part.part_name if part.part_name else part.label 625 partition_label = part.part_name if part.part_name else part.label
614 logger.debug("partition %d: set name to %s", 626 logger.debug("partition %d: set name to %s",
615 part.num, partition_label) 627 part.num, partition_label)
616 exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ 628 exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \
617 (part.num, partition_label, 629 (self.sector_size, self.path, part.num,
618 self.path), self.native_sysroot) 630 partition_label), self.native_sysroot)
619
620 if part.part_type: 631 if part.part_type:
621 logger.debug("partition %d: set type UID to %s", 632 logger.debug("partition %d: set type UID to %s",
622 part.num, part.part_type) 633 part.num, part.part_type)
623 exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ 634 exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \
624 (part.num, part.part_type, 635 (self.sector_size, self.path, part.num,
625 self.path), self.native_sysroot) 636 part.part_type), self.native_sysroot)
626 637
627 if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"): 638 if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"):
628 logger.debug("partition %d: set UUID to %s", 639 logger.debug("partition %d: set UUID to %s",
629 part.num, part.uuid) 640 part.num, part.uuid)
630 exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ 641 exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \
631 (part.num, part.uuid, self.path), 642 (self.sector_size, self.path, part.num, part.uuid),
632 self.native_sysroot) 643 self.native_sysroot)
633 644
634 if part.active: 645 if part.active:
635 flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot" 646 flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot"
636 logger.debug("Set '%s' flag for partition '%s' on disk '%s'", 647 logger.debug("Set '%s' flag for partition '%s' on disk '%s'",
637 flag_name, part.num, self.path) 648 flag_name, part.num, self.path)
638 exec_native_cmd("parted -s %s set %d %s on" % \ 649 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
639 (self.path, part.num, flag_name), 650 (self.sector_size, self.path, part.num, flag_name),
640 self.native_sysroot) 651 self.native_sysroot)
641 if self.ptable_format == 'gpt-hybrid' and part.mbr: 652 if self.ptable_format == 'gpt-hybrid' and part.mbr:
642 exec_native_cmd("parted -s %s set %d %s on" % \ 653 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
643 (mbr_path, hybrid_mbr_part_num, "boot"), 654 (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"),
644 self.native_sysroot) 655 self.native_sysroot)
645 if part.system_id: 656 if part.system_id:
646 exec_native_cmd("sfdisk --part-type %s %s %s" % \ 657 exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \
647 (self.path, part.num, part.system_id), 658 (self.sector_size, self.path, part.num, part.system_id),
648 self.native_sysroot) 659 self.native_sysroot)
649 660
650 if part.hidden and self.ptable_format == "gpt": 661 if part.hidden and self.ptable_format == "gpt":
651 logger.debug("Set hidden attribute for partition '%s' on disk '%s'", 662 logger.debug("Set hidden attribute for partition '%s' on disk '%s'",
652 part.num, self.path) 663 part.num, self.path)
653 exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \ 664 exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \
654 (self.path, part.num), 665 (self.sector_size, self.path, part.num),
655 self.native_sysroot) 666 self.native_sysroot)
656 667
657 if self.ptable_format == "gpt-hybrid": 668 if self.ptable_format == "gpt-hybrid":
@@ -664,7 +675,8 @@ class PartitionedImage():
664 # create with an arbitrary type, then change it to the correct type 675 # create with an arbitrary type, then change it to the correct type
665 # with sfdisk 676 # with sfdisk
666 self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD) 677 self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD)
667 exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num), 678 exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \
679 (self.sector_size, mbr_path, hybrid_mbr_part_num),
668 self.native_sysroot) 680 self.native_sysroot)
669 681
670 # Copy hybrid MBR 682 # Copy hybrid MBR
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py
index 5bd7390680..4279ddded8 100644
--- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
+++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py
@@ -13,7 +13,7 @@
13# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 13# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
14# 14#
15# DESCRIPTION 15# DESCRIPTION
16# This implements the 'bootimg-biosplusefi' source plugin class for 'wic' 16# This implements the 'bootimg_biosplusefi' source plugin class for 'wic'
17# 17#
18# AUTHORS 18# AUTHORS
19# William Bourque <wbourque [at) gmail.com> 19# William Bourque <wbourque [at) gmail.com>
@@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
34 34
35 Note it is possible to create an image that can boot from both 35 Note it is possible to create an image that can boot from both
36 legacy BIOS and EFI by defining two partitions: one with arg 36 legacy BIOS and EFI by defining two partitions: one with arg
37 --source bootimg-efi and another one with --source bootimg-pcbios. 37 --source bootimg_efi and another one with --source bootimg_pcbios.
38 However, this method has the obvious downside that it requires TWO 38 However, this method has the obvious downside that it requires TWO
39 partitions to be created on the storage device. 39 partitions to be created on the storage device.
40 Both partitions will also be marked as "bootable" which does not work on 40 Both partitions will also be marked as "bootable" which does not work on
@@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
45 the first partition will be duplicated into the second, even though it 45 the first partition will be duplicated into the second, even though it
46 will not be used at all. 46 will not be used at all.
47 47
48 Also, unlike "isoimage-isohybrid", which also does BIOS and EFI, this plugin 48 Also, unlike "isoimage_isohybrid", which also does BIOS and EFI, this plugin
49 allows you to have more than a single rootfs partition and does 49 allows you to have more than a single rootfs partition and does
50 not turn the rootfs into an initramfs RAM image. 50 not turn the rootfs into an initramfs RAM image.
51 51
@@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
53 does not have the limitations listed above. 53 does not have the limitations listed above.
54 54
55 The plugin is made so it tries not to reimplement what's already 55 The plugin is made so it tries not to reimplement what's already
56 been done in other plugins; as such it imports "bootimg-pcbios" 56 been done in other plugins; as such it imports "bootimg_pcbios"
57 and "bootimg-efi". 57 and "bootimg_efi".
58 Plugin "bootimg-pcbios" is used to generate legacy BIOS boot. 58 Plugin "bootimg_pcbios" is used to generate legacy BIOS boot.
59 Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it 59 Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it
60 requires a --sourceparams argument to know which loader to use; refer 60 requires a --sourceparams argument to know which loader to use; refer
61 to "bootimg-efi" code/documentation for the list of loaders. 61 to "bootimg_efi" code/documentation for the list of loaders.
62 62
63 Imports are handled with "SourceFileLoader" from importlib as it is 63 Imports are handled with "SourceFileLoader" from importlib as it is
64 otherwise very difficult to import modules that have a hyphen "-" in their 64 otherwise very difficult to import modules that have a hyphen "-" in their
65 filenames. 65 filenames.
66 The SourcePlugin() methods used in the plugins (do_install_disk, 66 The SourcePlugin() methods used in the plugins (do_install_disk,
67 do_configure_partition, do_prepare_partition) are then called on both, 67 do_configure_partition, do_prepare_partition) are then called on both,
68 beginning with "bootimg-efi". 68 beginning with "bootimg_efi".
69 69
70 Plugin options, such as "--sourceparams" can still be passed to a 70 Plugin options, such as "--sourceparams" can still be passed to a
71 plugin, as long as they do not cause issues in the other plugin. 71 plugin, as long as they do not cause issues in the other plugin.
72 72
73 Example wic configuration: 73 Example wic configuration:
74 part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\ 74 part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\
75 --ondisk sda --label os_boot --active --align 1024 --use-uuid 75 --ondisk sda --label os_boot --active --align 1024 --use-uuid
76 """ 76 """
77 77
78 name = 'bootimg-biosplusefi' 78 name = 'bootimg_biosplusefi'
79 79
80 __PCBIOS_MODULE_NAME = "bootimg-pcbios" 80 __PCBIOS_MODULE_NAME = "bootimg_pcbios"
81 __EFI_MODULE_NAME = "bootimg-efi" 81 __EFI_MODULE_NAME = "bootimg_efi"
82 82
83 __imgEFIObj = None 83 __imgEFIObj = None
84 __imgBiosObj = None 84 __imgBiosObj = None
@@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
100 100
101 """ 101 """
102 102
103 # Import bootimg-pcbios (class name "BootimgPcbiosPlugin") 103 # Import bootimg_pcbios (class name "BootimgPcbiosPlugin")
104 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), 104 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
105 cls.__PCBIOS_MODULE_NAME + ".py") 105 cls.__PCBIOS_MODULE_NAME + ".py")
106 loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) 106 loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath)
@@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
108 loader.exec_module(mod) 108 loader.exec_module(mod)
109 cls.__imgBiosObj = mod.BootimgPcbiosPlugin() 109 cls.__imgBiosObj = mod.BootimgPcbiosPlugin()
110 110
111 # Import bootimg-efi (class name "BootimgEFIPlugin") 111 # Import bootimg_efi (class name "BootimgEFIPlugin")
112 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), 112 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
113 cls.__EFI_MODULE_NAME + ".py") 113 cls.__EFI_MODULE_NAME + ".py")
114 loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) 114 loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath)
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py
index 13a9cddf4e..cf16705a28 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg_efi.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-efi' source plugin class for 'wic' 7# This implements the 'bootimg_efi' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com> 10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
@@ -32,7 +32,7 @@ class BootimgEFIPlugin(SourcePlugin):
32 This plugin supports GRUB 2 and systemd-boot bootloaders. 32 This plugin supports GRUB 2 and systemd-boot bootloaders.
33 """ 33 """
34 34
35 name = 'bootimg-efi' 35 name = 'bootimg_efi'
36 36
37 @classmethod 37 @classmethod
38 def _copy_additional_files(cls, hdddir, initrd, dtb): 38 def _copy_additional_files(cls, hdddir, initrd, dtb):
@@ -43,16 +43,18 @@ class BootimgEFIPlugin(SourcePlugin):
43 if initrd: 43 if initrd:
44 initrds = initrd.split(';') 44 initrds = initrd.split(';')
45 for rd in initrds: 45 for rd in initrds:
46 cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir) 46 cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir)
47 exec_cmd(cp_cmd, True) 47 out = exec_cmd(cp_cmd, True)
48 logger.debug("initrd files:\n%s" % (out))
48 else: 49 else:
49 logger.debug("Ignoring missing initrd") 50 logger.debug("Ignoring missing initrd")
50 51
51 if dtb: 52 if dtb:
52 if ';' in dtb: 53 if ';' in dtb:
53 raise WicError("Only one DTB supported, exiting") 54 raise WicError("Only one DTB supported, exiting")
54 cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir) 55 cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir)
55 exec_cmd(cp_cmd, True) 56 out = exec_cmd(cp_cmd, True)
57 logger.debug("dtb files:\n%s" % (out))
56 58
57 @classmethod 59 @classmethod
58 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): 60 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params):
@@ -123,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin):
123 @classmethod 125 @classmethod
124 def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): 126 def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params):
125 """ 127 """
126 Create loader-specific systemd-boot/gummiboot config 128 Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki)
129 support is done in the image recipe with uki.bbclass; only the systemd-boot loader
130 config and the ESP partition structure are created here.
127 """ 131 """
132 # detect uki.bbclass usage
133 image_classes = get_bitbake_var("IMAGE_CLASSES").split()
134 unified_image = False
135 if "uki" in image_classes:
136 unified_image = True
137
128 install_cmd = "install -d %s/loader" % hdddir 138 install_cmd = "install -d %s/loader" % hdddir
129 exec_cmd(install_cmd) 139 exec_cmd(install_cmd)
130 140
@@ -132,28 +142,26 @@ class BootimgEFIPlugin(SourcePlugin):
132 exec_cmd(install_cmd) 142 exec_cmd(install_cmd)
133 143
134 bootloader = creator.ks.bootloader 144 bootloader = creator.ks.bootloader
135
136 unified_image = source_params.get('create-unified-kernel-image') == "true"
137
138 loader_conf = "" 145 loader_conf = ""
139 if not unified_image:
140 loader_conf += "default boot\n"
141 loader_conf += "timeout %d\n" % bootloader.timeout
142 146
143 initrd = source_params.get('initrd') 147 # 5 seconds is a sensible default timeout
144 dtb = source_params.get('dtb') 148 loader_conf += "timeout %d\n" % (bootloader.timeout or 5)
145
146 if not unified_image:
147 cls._copy_additional_files(hdddir, initrd, dtb)
148 149
149 logger.debug("Writing systemd-boot config " 150 logger.debug("Writing systemd-boot config "
150 "%s/hdd/boot/loader/loader.conf", cr_workdir) 151 "%s/hdd/boot/loader/loader.conf", cr_workdir)
151 cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") 152 cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w")
152 cfg.write(loader_conf) 153 cfg.write(loader_conf)
154 logger.debug("loader.conf:\n%s" % (loader_conf))
153 cfg.close() 155 cfg.close()
154 156
157 initrd = source_params.get('initrd')
158 dtb = source_params.get('dtb')
159 if not unified_image:
160 cls._copy_additional_files(hdddir, initrd, dtb)
161
155 configfile = creator.ks.bootloader.configfile 162 configfile = creator.ks.bootloader.configfile
156 custom_cfg = None 163 custom_cfg = None
164 boot_conf = ""
157 if configfile: 165 if configfile:
158 custom_cfg = get_custom_config(configfile) 166 custom_cfg = get_custom_config(configfile)
159 if custom_cfg: 167 if custom_cfg:
@@ -164,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin):
164 else: 172 else:
165 raise WicError("configfile is specified but failed to " 173 raise WicError("configfile is specified but failed to "
166 "get it from %s.", configfile) 174 "get it from %s.", configfile)
167 175 else:
168 if not custom_cfg:
169 # Create systemd-boot configuration using parameters from wks file 176 # Create systemd-boot configuration using parameters from wks file
170 kernel = get_bitbake_var("KERNEL_IMAGETYPE") 177 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
171 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": 178 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
@@ -175,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin):
175 182
176 title = source_params.get('title') 183 title = source_params.get('title')
177 184
178 boot_conf = ""
179 boot_conf += "title %s\n" % (title if title else "boot") 185 boot_conf += "title %s\n" % (title if title else "boot")
180 boot_conf += "linux /%s\n" % kernel 186 boot_conf += "linux /%s\n" % kernel
181 187
@@ -200,6 +206,7 @@ class BootimgEFIPlugin(SourcePlugin):
200 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) 206 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
201 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") 207 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w")
202 cfg.write(boot_conf) 208 cfg.write(boot_conf)
209 logger.debug("boot.conf:\n%s" % (boot_conf))
203 cfg.close() 210 cfg.close()
204 211
205 212
@@ -223,9 +230,9 @@ class BootimgEFIPlugin(SourcePlugin):
223 elif source_params['loader'] == 'uefi-kernel': 230 elif source_params['loader'] == 'uefi-kernel':
224 pass 231 pass
225 else: 232 else:
226 raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) 233 raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader'])
227 except KeyError: 234 except KeyError:
228 raise WicError("bootimg-efi requires a loader, none specified") 235 raise WicError("bootimg_efi requires a loader, none specified")
229 236
230 if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: 237 if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None:
231 logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') 238 logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES')
@@ -245,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin):
245 252
246 # list of tuples (src_name, dst_name) 253 # list of tuples (src_name, dst_name)
247 deploy_files = [] 254 deploy_files = []
248 for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): 255 for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
249 if ';' in src_entry: 256 if ';' in src_entry:
250 dst_entry = tuple(src_entry.split(';')) 257 dst_entry = tuple(src_entry.split(';'))
251 if not dst_entry[0] or not dst_entry[1]: 258 if not dst_entry[0] or not dst_entry[1]:
@@ -304,134 +311,43 @@ class BootimgEFIPlugin(SourcePlugin):
304 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) 311 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
305 312
306 if source_params.get('create-unified-kernel-image') == "true": 313 if source_params.get('create-unified-kernel-image') == "true":
307 initrd = source_params.get('initrd') 314 raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.")
308 if not initrd:
309 raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting")
310
311 deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
312 efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub"))
313 if len(efi_stub) == 0:
314 raise WicError("Unified Kernel Image EFI stub not found, exiting")
315 efi_stub = efi_stub[0]
316
317 with tempfile.TemporaryDirectory() as tmp_dir:
318 label = source_params.get('label')
319 label_conf = "root=%s" % creator.rootdev
320 if label:
321 label_conf = "LABEL=%s" % label
322
323 bootloader = creator.ks.bootloader
324 cmdline = open("%s/cmdline" % tmp_dir, "w")
325 cmdline.write("%s %s" % (label_conf, bootloader.append))
326 cmdline.close()
327 315
328 initrds = initrd.split(';') 316 if source_params.get('install-kernel-into-boot-dir') != 'false':
329 initrd = open("%s/initrd" % tmp_dir, "wb") 317 install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \
330 for f in initrds: 318 (staging_kernel_dir, kernel, hdddir, kernel)
331 with open("%s/%s" % (deploy_dir, f), 'rb') as in_file: 319 out = exec_cmd(install_cmd)
332 shutil.copyfileobj(in_file, initrd) 320 logger.debug("Installed kernel files:\n%s" % out)
333 initrd.close()
334
335 # Searched by systemd-boot:
336 # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images
337 install_cmd = "install -d %s/EFI/Linux" % hdddir
338 exec_cmd(install_cmd)
339
340 staging_dir_host = get_bitbake_var("STAGING_DIR_HOST")
341 target_sys = get_bitbake_var("TARGET_SYS")
342
343 objdump_cmd = "%s-objdump" % target_sys
344 objdump_cmd += " -p %s" % efi_stub
345 objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'"
346
347 ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot)
348 align = int(align_str, 16)
349
350 objdump_cmd = "%s-objdump" % target_sys
351 objdump_cmd += " -h %s | tail -2" % efi_stub
352 ret, output = exec_native_cmd(objdump_cmd, native_sysroot)
353
354 offset = int(output.split()[2], 16) + int(output.split()[3], 16)
355
356 osrel_off = offset + align - offset % align
357 osrel_path = "%s/usr/lib/os-release" % staging_dir_host
358 osrel_sz = os.stat(osrel_path).st_size
359
360 cmdline_off = osrel_off + osrel_sz
361 cmdline_off = cmdline_off + align - cmdline_off % align
362 cmdline_sz = os.stat(cmdline.name).st_size
363
364 dtb_off = cmdline_off + cmdline_sz
365 dtb_off = dtb_off + align - dtb_off % align
366
367 dtb = source_params.get('dtb')
368 if dtb:
369 if ';' in dtb:
370 raise WicError("Only one DTB supported, exiting")
371 dtb_path = "%s/%s" % (deploy_dir, dtb)
372 dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \
373 (dtb_path, dtb_off)
374 linux_off = dtb_off + os.stat(dtb_path).st_size
375 linux_off = linux_off + align - linux_off % align
376 else:
377 dtb_params = ''
378 linux_off = dtb_off
379
380 linux_path = "%s/%s" % (staging_kernel_dir, kernel)
381 linux_sz = os.stat(linux_path).st_size
382
383 initrd_off = linux_off + linux_sz
384 initrd_off = initrd_off + align - initrd_off % align
385
386 # https://www.freedesktop.org/software/systemd/man/systemd-stub.html
387 objcopy_cmd = "%s-objcopy" % target_sys
388 objcopy_cmd += " --enable-deterministic-archives"
389 objcopy_cmd += " --preserve-dates"
390 objcopy_cmd += " --add-section .osrel=%s" % osrel_path
391 objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off
392 objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name
393 objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off
394 objcopy_cmd += dtb_params
395 objcopy_cmd += " --add-section .linux=%s" % linux_path
396 objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off
397 objcopy_cmd += " --add-section .initrd=%s" % initrd.name
398 objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off
399 objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir)
400
401 exec_native_cmd(objcopy_cmd, native_sysroot)
402 else:
403 if source_params.get('install-kernel-into-boot-dir') != 'false':
404 install_cmd = "install -m 0644 %s/%s %s/%s" % \
405 (staging_kernel_dir, kernel, hdddir, kernel)
406 exec_cmd(install_cmd)
407 321
408 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): 322 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"):
409 for src_path, dst_path in cls.install_task: 323 for src_path, dst_path in cls.install_task:
410 install_cmd = "install -m 0644 -D %s %s" \ 324 install_cmd = "install -v -p -m 0644 -D %s %s" \
411 % (os.path.join(kernel_dir, src_path), 325 % (os.path.join(kernel_dir, src_path),
412 os.path.join(hdddir, dst_path)) 326 os.path.join(hdddir, dst_path))
413 exec_cmd(install_cmd) 327 out = exec_cmd(install_cmd)
328 logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out)
414 329
415 try: 330 try:
416 if source_params['loader'] == 'grub-efi': 331 if source_params['loader'] == 'grub-efi':
417 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, 332 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir,
418 "%s/grub.cfg" % cr_workdir) 333 "%s/grub.cfg" % cr_workdir)
419 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: 334 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]:
420 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) 335 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:])
421 exec_cmd(cp_cmd, True) 336 exec_cmd(cp_cmd, True)
422 shutil.move("%s/grub.cfg" % cr_workdir, 337 shutil.move("%s/grub.cfg" % cr_workdir,
423 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) 338 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir)
424 elif source_params['loader'] == 'systemd-boot': 339 elif source_params['loader'] == 'systemd-boot':
425 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: 340 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]:
426 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) 341 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:])
427 exec_cmd(cp_cmd, True) 342 out = exec_cmd(cp_cmd, True)
343 logger.debug("systemd-boot files:\n%s" % out)
428 elif source_params['loader'] == 'uefi-kernel': 344 elif source_params['loader'] == 'uefi-kernel':
429 kernel = get_bitbake_var("KERNEL_IMAGETYPE") 345 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
430 if not kernel: 346 if not kernel:
431 raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target) 347 raise WicError("Empty KERNEL_IMAGETYPE")
432 target = get_bitbake_var("TARGET_SYS") 348 target = get_bitbake_var("TARGET_SYS")
433 if not target: 349 if not target:
434 raise WicError("Unknown arch (TARGET_SYS) %s\n" % target) 350 raise WicError("Empty TARGET_SYS")
435 351
436 if re.match("x86_64", target): 352 if re.match("x86_64", target):
437 kernel_efi_image = "bootx64.efi" 353 kernel_efi_image = "bootx64.efi"
@@ -445,23 +361,33 @@ class BootimgEFIPlugin(SourcePlugin):
445 raise WicError("UEFI stub kernel is incompatible with target %s" % target) 361 raise WicError("UEFI stub kernel is incompatible with target %s" % target)
446 362
447 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]: 363 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]:
448 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image) 364 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image)
449 exec_cmd(cp_cmd, True) 365 out = exec_cmd(cp_cmd, True)
366 logger.debug("uefi-kernel files:\n%s" % out)
450 else: 367 else:
451 raise WicError("unrecognized bootimg-efi loader: %s" % 368 raise WicError("unrecognized bootimg_efi loader: %s" %
452 source_params['loader']) 369 source_params['loader'])
370
371 # must have installed at least one EFI bootloader
372 out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi'))
373 logger.debug("Installed EFI loader files:\n%s" % out)
374 if not out:
375 raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.")
376
453 except KeyError: 377 except KeyError:
454 raise WicError("bootimg-efi requires a loader, none specified") 378 raise WicError("bootimg_efi requires a loader, none specified")
455 379
456 startup = os.path.join(kernel_dir, "startup.nsh") 380 startup = os.path.join(kernel_dir, "startup.nsh")
457 if os.path.exists(startup): 381 if os.path.exists(startup):
458 cp_cmd = "cp %s %s/" % (startup, hdddir) 382 cp_cmd = "cp -v -p %s %s/" % (startup, hdddir)
459 exec_cmd(cp_cmd, True) 383 out = exec_cmd(cp_cmd, True)
384 logger.debug("startup files:\n%s" % out)
460 385
461 for paths in part.include_path or []: 386 for paths in part.include_path or []:
462 for path in paths: 387 for path in paths:
463 cp_cmd = "cp -r %s %s/" % (path, hdddir) 388 cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir)
464 exec_cmd(cp_cmd, True) 389 out = exec_cmd(cp_cmd, True)
390 logger.debug("include_path files:\n%s" % out)
465 391
466 du_cmd = "du -bks %s" % hdddir 392 du_cmd = "du -bks %s" % hdddir
467 out = exec_cmd(du_cmd) 393 out = exec_cmd(du_cmd)
@@ -489,12 +415,14 @@ class BootimgEFIPlugin(SourcePlugin):
489 415
490 label = part.label if part.label else "ESP" 416 label = part.label if part.label else "ESP"
491 417
492 dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ 418 dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \
493 (label, part.fsuuid, bootimg, blocks) 419 (label, part.fsuuid, bootimg, blocks)
494 exec_native_cmd(dosfs_cmd, native_sysroot) 420 out = exec_native_cmd(dosfs_cmd, native_sysroot)
421 logger.debug("mkdosfs:\n%s" % (str(out)))
495 422
496 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) 423 mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir)
497 exec_native_cmd(mcopy_cmd, native_sysroot) 424 out = exec_native_cmd(mcopy_cmd, native_sysroot)
425 logger.debug("mcopy:\n%s" % (str(out)))
498 426
499 chmod_cmd = "chmod 644 %s" % bootimg 427 chmod_cmd = "chmod 644 %s" % bootimg
500 exec_cmd(chmod_cmd) 428 exec_cmd(chmod_cmd)
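Since create-unified-kernel-image now raises a WicError, UKI generation is expected to move into the image recipe. Based on the IMAGE_CLASSES check added to do_configure_systemdboot above, the migration is presumably along these lines:

    # local.conf or image recipe (assumption inferred from the
    # 'if "uki" in image_classes' check above)
    IMAGE_CLASSES += "uki"

together with dropping create-unified-kernel-image and its initrd= parameter from the --sourceparams in the wks file.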
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py
index 1071d1af3f..cc121a78f0 100644
--- a/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/scripts/lib/wic/plugins/source/bootimg_partition.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-partition' source plugin class for 7# This implements the 'bootimg_partition' source plugin class for
8# 'wic'. The plugin creates an image of boot partition, copying over 8# 'wic'. The plugin creates an image of boot partition, copying over
9# files listed in IMAGE_BOOT_FILES bitbake variable. 9# files listed in IMAGE_BOOT_FILES bitbake variable.
10# 10#
@@ -16,7 +16,7 @@ import logging
16import os 16import os
17import re 17import re
18 18
19from glob import glob 19from oe.bootfiles import get_boot_files
20 20
21from wic import WicError 21from wic import WicError
22from wic.engine import get_custom_config 22from wic.engine import get_custom_config
@@ -31,7 +31,7 @@ class BootimgPartitionPlugin(SourcePlugin):
31 listed in IMAGE_BOOT_FILES bitbake variable. 31 listed in IMAGE_BOOT_FILES bitbake variable.
32 """ 32 """
33 33
34 name = 'bootimg-partition' 34 name = 'bootimg_partition'
35 image_boot_files_var_name = 'IMAGE_BOOT_FILES' 35 image_boot_files_var_name = 'IMAGE_BOOT_FILES'
36 36
37 @classmethod 37 @classmethod
@@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin):
66 66
67 logger.debug('Boot files: %s', boot_files) 67 logger.debug('Boot files: %s', boot_files)
68 68
69 # list of tuples (src_name, dst_name) 69 cls.install_task = get_boot_files(kernel_dir, boot_files)
70 deploy_files = []
71 for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
72 if ';' in src_entry:
73 dst_entry = tuple(src_entry.split(';'))
74 if not dst_entry[0] or not dst_entry[1]:
75 raise WicError('Malformed boot file entry: %s' % src_entry)
76 else:
77 dst_entry = (src_entry, src_entry)
78
79 logger.debug('Destination entry: %r', dst_entry)
80 deploy_files.append(dst_entry)
81
82 cls.install_task = [];
83 for deploy_entry in deploy_files:
84 src, dst = deploy_entry
85 if '*' in src:
86 # by default install files under their basename
87 entry_name_fn = os.path.basename
88 if dst != src:
89 # unless a target name was given, then treat name
90 # as a directory and append a basename
91 entry_name_fn = lambda name: \
92 os.path.join(dst,
93 os.path.basename(name))
94
95 srcs = glob(os.path.join(kernel_dir, src))
96
97 logger.debug('Globbed sources: %s', ', '.join(srcs))
98 for entry in srcs:
99 src = os.path.relpath(entry, kernel_dir)
100 entry_dst_name = entry_name_fn(entry)
101 cls.install_task.append((src, entry_dst_name))
102 else:
103 cls.install_task.append((src, dst))
104
105 if source_params.get('loader') != "u-boot": 70 if source_params.get('loader') != "u-boot":
106 return 71 return
107 72
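The parsing removed here is now shared via oe.bootfiles.get_boot_files; a condensed sketch of the behaviour it presumably preserves, reconstructed from the deleted block (malformed-entry errors elided):

    import os
    import re
    from glob import glob

    def get_boot_files(kernel_dir, boot_files):
        # entries are "src" or "src;dst"; a '*' in src globs under kernel_dir
        install_task = []
        for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
            src, _, dst = src_entry.partition(';')
            dst = dst or src
            if '*' in src:
                for entry in glob(os.path.join(kernel_dir, src)):
                    rel = os.path.relpath(entry, kernel_dir)
                    if dst != src:
                        # an explicit dst names a directory for globbed files
                        install_task.append((rel, os.path.join(dst, os.path.basename(entry))))
                    else:
                        # otherwise install globbed files under their basename
                        install_task.append((rel, os.path.basename(entry)))
            else:
                install_task.append((src, dst))
        return install_task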
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py
index a207a83530..21f41e00bb 100644
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-pcbios' source plugin class for 'wic' 7# This implements the 'bootimg_pcbios' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com> 10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
@@ -27,7 +27,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
27 Create MBR boot partition and install syslinux on it. 27 Create MBR boot partition and install syslinux on it.
28 """ 28 """
29 29
30 name = 'bootimg-pcbios' 30 name = 'bootimg_pcbios'
31 31
32 @classmethod 32 @classmethod
33 def _get_bootimg_dir(cls, bootimg_dir, dirname): 33 def _get_bootimg_dir(cls, bootimg_dir, dirname):
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py
index 607356ad13..5d42eb5d3e 100644
--- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'isoimage-isohybrid' source plugin class for 'wic' 7# This implements the 'isoimage_isohybrid' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Mihaly Varga <mihaly.varga (at] ni.com> 10# Mihaly Varga <mihaly.varga (at] ni.com>
@@ -35,7 +35,7 @@ class IsoImagePlugin(SourcePlugin):
35 bootloader files. 35 bootloader files.
36 36
37 Example kickstart file: 37 Example kickstart file:
38 part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ 38 part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\
39 image_name= IsoImage" --ondisk cd --label LIVECD 39 image_name= IsoImage" --ondisk cd --label LIVECD
40 bootloader --timeout=10 --append=" " 40 bootloader --timeout=10 --append=" "
41 41
@@ -45,7 +45,7 @@ class IsoImagePlugin(SourcePlugin):
45 extension added by direct imager plugin) and a file named IsoImage-cd.iso 45 extension added by direct imager plugin) and a file named IsoImage-cd.iso
46 """ 46 """
47 47
48 name = 'isoimage-isohybrid' 48 name = 'isoimage_isohybrid'
49 49
50 @classmethod 50 @classmethod
51 def do_configure_syslinux(cls, creator, cr_workdir): 51 def do_configure_syslinux(cls, creator, cr_workdir):
@@ -340,10 +340,10 @@ class IsoImagePlugin(SourcePlugin):
340 cls.do_configure_grubefi(part, creator, target_dir) 340 cls.do_configure_grubefi(part, creator, target_dir)
341 341
342 else: 342 else:
343 raise WicError("unrecognized bootimg-efi loader: %s" % 343 raise WicError("unrecognized bootimg_efi loader: %s" %
344 source_params['loader']) 344 source_params['loader'])
345 except KeyError: 345 except KeyError:
346 raise WicError("bootimg-efi requires a loader, none specified") 346 raise WicError("bootimg_efi requires a loader, none specified")
347 347
348 # Create efi.img that contains bootloader files for EFI booting 348 # Create efi.img that contains bootloader files for EFI booting
349 # if ISODIR didn't exist or didn't contain it 349 # if ISODIR didn't exist or didn't contain it
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
index c990143c0d..06fce06bb1 100644
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ b/scripts/lib/wic/plugins/source/rootfs.py
@@ -41,7 +41,7 @@ class RootfsPlugin(SourcePlugin):
41 # Disallow climbing outside of parent directory using '..', 41 # Disallow climbing outside of parent directory using '..',
42 # because doing so could be quite disastrous (we will delete the 42 # because doing so could be quite disastrous (we will delete the
43 # directory, or modify a directory outside OpenEmbedded). 43 # directory, or modify a directory outside OpenEmbedded).
44 full_path = os.path.realpath(os.path.join(rootfs_dir, path)) 44 full_path = os.path.abspath(os.path.join(rootfs_dir, path))
45 if not full_path.startswith(os.path.realpath(rootfs_dir)): 45 if not full_path.startswith(os.path.realpath(rootfs_dir)):
46 logger.error("%s: Must point inside the rootfs: %s" % (cmd, path)) 46 logger.error("%s: Must point inside the rootfs: %s" % (cmd, path))
47 sys.exit(1) 47 sys.exit(1)
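The switch from realpath() to abspath() on the joined path means '..' components are collapsed lexically before the containment check, while the rootfs root itself is still resolved through symlinks; a minimal standalone illustration:

    import os

    def resolve_inside(rootfs_dir, path):
        # collapse "a/../b" without following symlinks in 'path'
        full_path = os.path.abspath(os.path.join(rootfs_dir, path))
        if not full_path.startswith(os.path.realpath(rootfs_dir)):
            raise ValueError("must point inside the rootfs: %s" % path)
        return full_path

    # resolve_inside("/tmp/rootfs", "etc/fstab")  -> "/tmp/rootfs/etc/fstab"
    # resolve_inside("/tmp/rootfs", "../../etc")  -> raises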
diff --git a/scripts/lz4c b/scripts/lz4c
new file mode 100755
index 0000000000..466fc349e0
--- /dev/null
+++ b/scripts/lz4c
@@ -0,0 +1,26 @@
1#!/usr/bin/env bash
2
3# Wrapper to intercept legacy lz4c arguments and convert to lz4.
4args=()
5while [ $# -ne 0 ]; do
6 case ${1} in
7 -c0)
8 args+=(-0)
9 ;;
10 -c1)
11 args+=(-9)
12 ;;
13 -c2|-hc)
14 args+=(-12)
15 ;;
16 -y)
17 args+=(--force)
18 ;;
19 *)
20 args+=("${1}")
21 ;;
22 esac
23 shift
24done
25
26exec lz4 "${args[@]}"
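The translation the wrapper performs, shown on a couple of hypothetical invocations:

    lz4c -c1 -y vmlinux vmlinux.lz4    # runs: lz4 -9 --force vmlinux vmlinux.lz4
    lz4c -hc vmlinux vmlinux.lz4       # runs: lz4 -12 vmlinux vmlinux.lz4

Unrecognized arguments, including the file operands, pass through unchanged.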
diff --git a/scripts/makefile-getvar b/scripts/makefile-getvar
new file mode 100755
index 0000000000..4a07055e68
--- /dev/null
+++ b/scripts/makefile-getvar
@@ -0,0 +1,24 @@
1#! /bin/sh
2
3# Get a variable's value from a makefile:
4#
5# $ makefile-getvar Makefile VARIABLE VARIABLE ...
6#
7# If multiple variables are specified, they will be printed one per line.
8#
9# SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
10# SPDX-License-Identifier: GPL-2.0-only
11
12set -eu
13
14MAKEFILE=$1
15shift
16
17for VARIABLE in "$@"; do
18 make -f - $VARIABLE.var <<EOF
19include $MAKEFILE
20
21%.var:
22 @echo \$(\$*)
23EOF
24done
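A worked example of the pattern-rule trick: for a target VERSION.var the stem $* is VERSION, so the rule body echoes $(VERSION) after including the target makefile. Given a Makefile containing 'VERSION = 1.2.3' (hypothetical):

    $ makefile-getvar Makefile VERSION
    1.2.3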
diff --git a/scripts/oe-build-perf-report b/scripts/oe-build-perf-report
index 266700d294..a36f3c1bca 100755
--- a/scripts/oe-build-perf-report
+++ b/scripts/oe-build-perf-report
@@ -336,10 +336,16 @@ def print_html_report(data, id_comp, buildstats):
336 test_i = test_data['tests'][test] 336 test_i = test_data['tests'][test]
337 meas_i = test_i['measurements'][meas] 337 meas_i = test_i['measurements'][meas]
338 commit_num = get_data_item(meta, 'layers.meta.commit_count') 338 commit_num = get_data_item(meta, 'layers.meta.commit_count')
339 commit = get_data_item(meta, 'layers.meta.commit')
339 # Add start_time for both test measurement types of sysres and disk usage 340 # Add start_time for both test measurement types of sysres and disk usage
340 start_time = test_i['start_time'][0] 341 try:
342 # Use the commit_time if available, falling back to start_time
343 start_time = get_data_item(meta, 'layers.meta.commit_time')
344 except KeyError:
345 start_time = test_i['start_time'][0]
341 samples.append(measurement_stats(meas_i, '', start_time)) 346 samples.append(measurement_stats(meas_i, '', start_time))
342 samples[-1]['commit_num'] = commit_num 347 samples[-1]['commit_num'] = commit_num
348 samples[-1]['commit'] = commit
343 349
344 absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) 350 absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean'])
345 reldiff = absdiff * 100 / samples[id_comp]['mean'] 351 reldiff = absdiff * 100 / samples[id_comp]['mean']
diff --git a/scripts/oe-selftest b/scripts/oe-selftest
index 18ac0f5869..afc48d9905 100755
--- a/scripts/oe-selftest
+++ b/scripts/oe-selftest
@@ -18,8 +18,6 @@
18 18
19import os 19import os
20import sys 20import sys
21import argparse
22import logging
23 21
24scripts_path = os.path.dirname(os.path.realpath(__file__)) 22scripts_path = os.path.dirname(os.path.realpath(__file__))
25lib_path = scripts_path + '/lib' 23lib_path = scripts_path + '/lib'
diff --git a/scripts/oe-setup-build b/scripts/oe-setup-build
index c0476992a2..49603d9fd1 100755
--- a/scripts/oe-setup-build
+++ b/scripts/oe-setup-build
@@ -18,8 +18,7 @@ def makebuildpath(topdir, template):
18 18
19def discover_templates(layers_file): 19def discover_templates(layers_file):
20 if not os.path.exists(layers_file): 20 if not os.path.exists(layers_file):
21 print("List of layers {} does not exist; were the layers set up using the setup-layers script?".format(layers_file)) 21 raise Exception("List of layers {} does not exist; were the layers set up using the setup-layers script or bitbake-setup tool?".format(layers_file))
22 return None
23 22
24 templates = [] 23 templates = []
25 layers_list = json.load(open(layers_file))["layers"] 24 layers_list = json.load(open(layers_file))["layers"]
@@ -77,8 +76,7 @@ def find_template(template_name, templates):
77 for t in templates: 76 for t in templates:
78 if t["templatename"] == template_name: 77 if t["templatename"] == template_name:
79 return t 78 return t
80 print("Configuration {} is not one of {}, please try again.".format(tempalte_name, [t["templatename"] for t in templates])) 79 raise Exception("Configuration {} is not one of {}, please try again.".format(template_name, [t["templatename"] for t in templates]))
81 return None
82 80
83def setup_build_env(args): 81def setup_build_env(args):
84 templates = discover_templates(args.layerlist) 82 templates = discover_templates(args.layerlist)
@@ -102,9 +100,9 @@ def setup_build_env(args):
102 100
103 cmd = "TEMPLATECONF={} {}".format(template["templatepath"], cmd_base) 101 cmd = "TEMPLATECONF={} {}".format(template["templatepath"], cmd_base)
104 if not no_shell: 102 if not no_shell:
105 cmd = cmd + " && {}".format(os.environ['SHELL']) 103 cmd = cmd + " && {}".format(os.environ.get('SHELL','bash'))
106 print("Running:", cmd) 104 print("Running:", cmd)
107 subprocess.run(cmd, shell=True, executable=os.environ['SHELL']) 105 subprocess.run(cmd, shell=True, executable=os.environ.get('SHELL','bash'))
108 106
109parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.") 107parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.")
110parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers())) 108parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers()))
diff --git a/scripts/patchtest b/scripts/patchtest
index 0be7062dc2..9218db232a 100755
--- a/scripts/patchtest
+++ b/scripts/patchtest
@@ -9,12 +9,12 @@
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11
12import sys 12import json
13import os
14import unittest
15import logging 13import logging
14import os
15import sys
16import traceback 16import traceback
17import json 17import unittest
18 18
19# Include current path so test cases can see it 19# Include current path so test cases can see it
20sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) 20sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
@@ -22,16 +22,17 @@ sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
22# Include patchtest library 22# Include patchtest library
23sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest')) 23sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest'))
24 24
25from data import PatchTestInput 25from patchtest_parser import PatchtestParser
26from repo import PatchTestRepo 26from repo import PatchTestRepo
27 27
28import utils 28logger = logging.getLogger("patchtest")
29logger = utils.logger_create('patchtest') 29loggerhandler = logging.StreamHandler()
30loggerhandler.setFormatter(logging.Formatter("%(message)s"))
31logger.addHandler(loggerhandler)
32logger.setLevel(logging.INFO)
30info = logger.info 33info = logger.info
31error = logger.error 34error = logger.error
32 35
33import repo
34
35def getResult(patch, mergepatch, logfile=None): 36def getResult(patch, mergepatch, logfile=None):
36 37
37 class PatchTestResult(unittest.TextTestResult): 38 class PatchTestResult(unittest.TextTestResult):
@@ -46,10 +47,10 @@ def getResult(patch, mergepatch, logfile=None):
46 def startTestRun(self): 47 def startTestRun(self):
47 # let's create the repo already, it can be used later on 48 # let's create the repo already, it can be used later on
48 repoargs = { 49 repoargs = {
49 'repodir': PatchTestInput.repodir, 50 "repodir": PatchtestParser.repodir,
50 'commit' : PatchTestInput.basecommit, 51 "commit": PatchtestParser.basecommit,
51 'branch' : PatchTestInput.basebranch, 52 "branch": PatchtestParser.basebranch,
52 'patch' : patch, 53 "patch": patch,
53 } 54 }
54 55
55 self.repo_error = False 56 self.repo_error = False
@@ -57,7 +58,7 @@ def getResult(patch, mergepatch, logfile=None):
57 self.test_failure = False 58 self.test_failure = False
58 59
59 try: 60 try:
60 self.repo = PatchTestInput.repo = PatchTestRepo(**repoargs) 61 self.repo = PatchtestParser.repo = PatchTestRepo(**repoargs)
61 except: 62 except:
62 logger.error(traceback.print_exc()) 63 logger.error(traceback.print_exc())
63 self.repo_error = True 64 self.repo_error = True
@@ -128,7 +129,11 @@ def _runner(resultklass, prefix=None):
128 loader.testMethodPrefix = prefix 129 loader.testMethodPrefix = prefix
129 130
130 # create the suite with discovered tests and the corresponding runner 131 # create the suite with discovered tests and the corresponding runner
131 suite = loader.discover(start_dir=PatchTestInput.testdir, pattern=PatchTestInput.pattern, top_level_dir=PatchTestInput.topdir) 132 suite = loader.discover(
133 start_dir=PatchtestParser.testdir,
134 pattern=PatchtestParser.pattern,
135 top_level_dir=PatchtestParser.topdir,
136 )
132 ntc = suite.countTestCases() 137 ntc = suite.countTestCases()
133 138
134 # if there are no test cases, just quit 139 # if there are no test cases, just quit
@@ -160,24 +165,31 @@ def run(patch, logfile=None):
160 postmerge_resultklass = getResult(patch, True, logfile) 165 postmerge_resultklass = getResult(patch, True, logfile)
161 postmerge_result = _runner(postmerge_resultklass, 'test') 166 postmerge_result = _runner(postmerge_resultklass, 'test')
162 167
163 print('----------------------------------------------------------------------\n') 168 print_result_message(premerge_result, postmerge_result)
164 if premerge_result == 2 and postmerge_result == 2:
165 logger.error('patchtest: No test cases found - did you specify the correct suite directory?')
166 if premerge_result == 1 or postmerge_result == 1:
167 logger.error('WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance')
168 else:
169 logger.info('OK: patchtest: All patchtests passed')
170 print('----------------------------------------------------------------------\n')
171 return premerge_result or postmerge_result 169 return premerge_result or postmerge_result
172 170
171def print_result_message(preresult, postresult):
172 print("----------------------------------------------------------------------\n")
173 if preresult == 2 and postresult == 2:
174 logger.error(
175 "patchtest: No test cases found - did you specify the correct suite directory?"
176 )
177 if preresult == 1 or postresult == 1:
178 logger.error(
179 "WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance"
180 )
181 else:
182 logger.info("OK: patchtest: All patchtests passed")
183 print("----------------------------------------------------------------------\n")
184
173def main(): 185def main():
174 tmp_patch = False 186 tmp_patch = False
175 patch_path = PatchTestInput.patch_path 187 patch_path = PatchtestParser.patch_path
176 log_results = PatchTestInput.log_results 188 log_results = PatchtestParser.log_results
177 log_path = None 189 log_path = None
178 patch_list = None 190 patch_list = None
179 191
180 git_status = os.popen("(cd %s && git status)" % PatchTestInput.repodir).read() 192 git_status = os.popen("(cd %s && git status)" % PatchtestParser.repodir).read()
181 status_matches = ["Changes not staged for commit", "Changes to be committed"] 193 status_matches = ["Changes not staged for commit", "Changes to be committed"]
182 if any([match in git_status for match in status_matches]): 194 if any([match in git_status for match in status_matches]):
183 logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest") 195 logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest")
@@ -212,16 +224,16 @@ def main():
212if __name__ == '__main__': 224if __name__ == '__main__':
213 ret = 1 225 ret = 1
214 226
215 # Parse the command line arguments and store it on the PatchTestInput namespace 227 # Parse the command line arguments and store it on the PatchtestParser namespace
216 PatchTestInput.set_namespace() 228 PatchtestParser.set_namespace()
217 229
218 # set debugging level 230 # set debugging level
219 if PatchTestInput.debug: 231 if PatchtestParser.debug:
220 logger.setLevel(logging.DEBUG) 232 logger.setLevel(logging.DEBUG)
221 233
222 # if topdir not define, default it to testdir 234 # if topdir not define, default it to testdir
223 if not PatchTestInput.topdir: 235 if not PatchtestParser.topdir:
224 PatchTestInput.topdir = PatchTestInput.testdir 236 PatchtestParser.topdir = PatchtestParser.testdir
225 237
226 try: 238 try:
227 ret = main() 239 ret = main()
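
The refactored print_result_message above only interprets the numeric codes returned by _runner, and the script's exit status is the first non-zero of the two runs. A minimal sketch of that mapping as the conditionals imply it (inferred from the diff, not a documented API):

    # inferred result codes: 0 = all tests passed,
    # 1 = at least one failure/error, 2 = no test cases discovered
    def overall_result(premerge_result, postmerge_result):
        # mirrors 'return premerge_result or postmerge_result' above:
        # the first non-zero code becomes the exit status
        return premerge_result or postmerge_result
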
diff --git a/scripts/patchtest.README b/scripts/patchtest.README
index 76b5fcdb6d..3c1ee1af1d 100644
--- a/scripts/patchtest.README
+++ b/scripts/patchtest.README
@@ -3,40 +3,35 @@
3## Introduction 3## Introduction
4 4
5Patchtest is a test framework for community patches based on the standard 5Patchtest is a test framework for community patches based on the standard
6unittest python module. As input, it needs tree elements to work properly: 6unittest python module. As input, it needs three elements to work properly:
7a patch in mbox format (either created with `git format-patch` or fetched 7
8from 'patchwork'), a test suite and a target repository. 8- a patch in mbox format (either created with `git format-patch` or fetched
9from 'patchwork')
10- a test suite
11- a target repository
9 12
10The first test suite intended to be used with patchtest is found in the 13The first test suite intended to be used with patchtest is found in the
11openembedded-core repository [1] targeted for patches that get into the 14openembedded-core repository [1], targeted for patches that get into the
12openembedded-core mailing list [2]. This suite is also intended as a 15openembedded-core mailing list [2]. This suite is also intended as a
13baseline for development of similar suites for other layers as needed. 16baseline for development of similar suites for other layers as needed.
14 17
15Patchtest can either run on a host or a guest machine, depending on which 18Patchtest can either run on a host or a guest machine, depending on
16environment the execution needs to be done. If you plan to test your own patches 19which environment you prefer. If you plan to test your own patches (a
17(a good practice before these are sent to the mailing list), the easiest way is 20good practice before these are sent to the mailing list), the easiest
18to install and execute on your local host; on the other hand, if automatic 21way is to install and execute on your local host; on the other hand, if
19testing is intended, the guest method is strongly recommended. The guest 22automatic testing is intended, the guest method is strongly recommended.
20method requires the use of the patchtest layer, in addition to the tools 23The guest method requires the use of the patchtest layer, in addition to
21available in oe-core: https://git.yoctoproject.org/patchtest/ 24the tools available in oe-core: https://git.yoctoproject.org/patchtest/
22 25
23## Installation 26## Installation
24 27
25As a tool for use with the Yocto Project, the [quick start guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html) 28As a tool for use with the Yocto Project, the [quick start
26contains the necessary prerequisites for a basic project. In addition, 29guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html)
27patchtest relies on the following Python modules: 30contains the necessary prerequisites. In addition, patchtest relies on
28 31several Python modules for parsing and analysis, which can be installed
29- boto3 (for sending automated results emails only) 32by running `pip install -r meta/lib/patchtest/requirements.txt`. Note
30- git-pw>=2.5.0 33that git-pw is not automatically added to the user's PATH; by default,
31- jinja2 34it is installed at ~/.local/bin/git-pw.
32- pylint
33- pyparsing>=3.0.9
34- unidiff
35
36These can be installed by running `pip install -r
37meta/lib/patchtest/requirements.txt`. Note that git-pw is not
38automatically added to the user's PATH; by default, it is installed at
39~/.local/bin/git-pw.
40 35
41For git-pw (and therefore scripts such as patchtest-get-series) to work, you need 36For git-pw (and therefore scripts such as patchtest-get-series) to work, you need
42to provide a Patchwork instance in your user's .gitconfig, like so (the project 37to provide a Patchwork instance in your user's .gitconfig, like so (the project
@@ -74,7 +69,7 @@ the target project, but these parameters can be configured using the `--limit`,
74To run patchtest on the host, do the following: 69To run patchtest on the host, do the following:
75 70
761. In openembedded-core/poky, do `source oe-init-build-env` 711. In openembedded-core/poky, do `source oe-init-build-env`
772. Generate patch files from the target repository by doing `git-format patch -N`, 722. Generate patch files from the target repository by doing `git format-patch -N`,
78 where N is the number of patches starting at HEAD, or by using git-pw 73 where N is the number of patches starting at HEAD, or by using git-pw
79 or patchtest-get-series 74 or patchtest-get-series
803. Run patchtest on a patch file by doing the following: 753. Run patchtest on a patch file by doing the following:
@@ -123,7 +118,7 @@ The general flow of guest mode is:
123 -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m 118 -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m
124 2048"` 119 2048"`
125 120
126Patchtest runs as an initscript for the core-image-patchtest image and 121Patchtest is run by an initscript for the core-image-patchtest image and
127shuts down after completion, so there is no input required from a user 122shuts down after completion, so there is no input required from a user
128during operation. Unlike in host mode, the guest is designed to 123during operation. Unlike in host mode, the guest is designed to
129automatically generate test result files, in the same directory as the 124automatically generate test result files, in the same directory as the
@@ -131,6 +126,17 @@ targeted patch files but with .testresult as an extension. These contain
131the entire output of the patchtest run for each respective pass, 126the entire output of the patchtest run for each respective pass,
132including the PASS, FAIL, and SKIP indicators for each test run. 127including the PASS, FAIL, and SKIP indicators for each test run.
133 128
129### Running Patchtest Selftests
130
131Patchtest also includes selftests, which are currently in the form of
132several contrived patch files and a runner script found in
133`meta/lib/patchtest/selftest/`. In order to run these, the
134`meta-selftest` layer must be added to bblayers.conf. It is also
135recommended to set BB_SERVER_TIMEOUT (and thus enable memory-resident
136bitbake) in local.conf to reduce runtime, as the bitbake startup process
137will otherwise add to it significantly when restarted for each test
138patch.
139
134## Contributing 140## Contributing
135 141
136The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions, 142The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions,
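
Since guest mode leaves one .testresult file per patch, the PASS/FAIL/SKIP indicators described above can be tallied with a few lines of Python; a minimal sketch (the result file name is a hypothetical example, and the exact naming should be checked against a real run):

    from collections import Counter

    counts = Counter()
    with open("0001-example.patch.testresult") as f:
        for line in f:
            for verdict in ("PASS", "FAIL", "SKIP"):
                if verdict in line:
                    counts[verdict] += 1
    print(dict(counts))
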
diff --git a/scripts/pull-sdpx-licenses.py b/scripts/pull-sdpx-licenses.py
new file mode 100755
index 0000000000..597a62133f
--- /dev/null
+++ b/scripts/pull-sdpx-licenses.py
@@ -0,0 +1,101 @@
1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import argparse
8import json
9import sys
10import urllib.request
11from pathlib import Path
12
13TOP_DIR = Path(__file__).parent.parent
14
15
16def main():
17 parser = argparse.ArgumentParser(
18 description="Update SPDX License files from upstream"
19 )
20 parser.add_argument(
21 "-v",
22 "--version",
23 metavar="MAJOR.MINOR[.MICRO]",
24 help="Pull specific version of License list instead of latest",
25 )
26 parser.add_argument(
27 "--overwrite",
28 action="store_true",
29 help="Update existing license file text with upstream text",
30 )
31 parser.add_argument(
32 "--deprecated",
33 action="store_true",
34 help="Update deprecated licenses",
35 )
36 parser.add_argument(
37 "--dest",
38 type=Path,
39 default=TOP_DIR / "meta" / "files" / "common-licenses",
40 help="Write licenses to directory DEST. Default is %(default)s",
41 )
42
43 args = parser.parse_args()
44
45 if args.version:
46 version = f"v{args.version}"
47 else:
48 # Fetch the latest release
49 req = urllib.request.Request(
50 "https://api.github.com/repos/spdx/license-list-data/releases/latest"
51 )
52 req.add_header("X-GitHub-Api-Version", "2022-11-28")
53 req.add_header("Accept", "application/vnd.github+json")
54 with urllib.request.urlopen(req) as response:
55 data = json.load(response)
56 version = data["tag_name"]
57
58 print(f"Pulling SPDX license list version {version}")
59 req = urllib.request.Request(
60 f"https://raw.githubusercontent.com/spdx/license-list-data/{version}/json/licenses.json"
61 )
62 with urllib.request.urlopen(req) as response:
63 spdx_licenses = json.load(response)
64
65 with (TOP_DIR / "meta" / "files" / "spdx-licenses.json").open("w") as f:
66 json.dump(spdx_licenses, f, sort_keys=True, indent=2)
67
68 total_count = len(spdx_licenses["licenses"])
69 updated = 0
70 for idx, lic in enumerate(spdx_licenses["licenses"]):
71 lic_id = lic["licenseId"]
72
73 print(f"[{idx + 1} of {total_count}] ", end="")
74
75 dest_license_file = args.dest / lic_id
76 if dest_license_file.is_file() and not args.overwrite:
77 print(f"Skipping {lic_id} since it already exists")
78 continue
79
80 print(f"Fetching {lic_id}... ", end="", flush=True)
81
82 req = urllib.request.Request(lic["detailsUrl"])
83 with urllib.request.urlopen(req) as response:
84 lic_data = json.load(response)
85
86 if lic_data["isDeprecatedLicenseId"] and not args.deprecated:
87 print("Skipping (deprecated)")
88 continue
89
90 with dest_license_file.open("w") as f:
91 f.write(lic_data["licenseText"])
92 updated += 1
93 print("done")
94
95 print(f"Updated {updated} licenses")
96
97 return 0
98
99
100if __name__ == "__main__":
101 sys.exit(main())
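
Besides populating the common-licenses directory, the script snapshots the whole upstream list into meta/files/spdx-licenses.json (the json.dump call above), so it can be read back offline; a small sketch of consuming that snapshot (the licenseListVersion key is assumed from the upstream SPDX schema, hence the .get fallback):

    import json
    from pathlib import Path

    with (Path("meta") / "files" / "spdx-licenses.json").open() as f:
        data = json.load(f)
    # "licenses" is the key the script itself iterates over
    print(data.get("licenseListVersion", "unknown"), len(data["licenses"]), "license IDs")
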
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py
index c6e67833ab..16739a0fa1 100644
--- a/scripts/pybootchartgui/pybootchartgui/draw.py
+++ b/scripts/pybootchartgui/pybootchartgui/draw.py
@@ -69,6 +69,11 @@ CPU_COLOR = (0.40, 0.55, 0.70, 1.0)
69IO_COLOR = (0.76, 0.48, 0.48, 0.5) 69IO_COLOR = (0.76, 0.48, 0.48, 0.5)
70# Disk throughput color. 70# Disk throughput color.
71DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0) 71DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0)
72
73BYTES_RECEIVED_COLOR = (0.0, 0.0, 1.0, 1.0)
74BYTES_TRANSMITTED_COLOR = (1.0, 0.0, 0.0, 1.0)
75BYTES_RECEIVE_DIFF_COLOR = (0.0, 0.0, 1.0, 0.3)
76BYTES_TRANSMIT_DIFF_COLOR = (1.0, 0.0, 0.0, 0.3)
72# CPU load chart color. 77# CPU load chart color.
73FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0) 78FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0)
74# Mem cached color 79# Mem cached color
@@ -437,6 +442,49 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
437 442
438 curr_y = curr_y + 30 + bar_h 443 curr_y = curr_y + 30 + bar_h
439 444
445 if trace.net_stats:
446 for iface, samples in trace.net_stats.items():
447 max_received_sample = max(samples, key=lambda s: s.received_bytes)
448 max_transmitted_sample = max(samples, key=lambda s: s.transmitted_bytes)
449 max_receive_diff_sample = max(samples, key=lambda s: s.receive_diff)
450 max_transmit_diff_sample = max(samples, key=lambda s: s.transmit_diff)
451
452 draw_text(ctx, "Iface: %s" % (iface), TEXT_COLOR, off_x, curr_y+20)
453 draw_legend_line(ctx, "Bytes received (max %d)" % (max_received_sample.received_bytes),
454 BYTES_RECEIVED_COLOR, off_x+150, curr_y+20, leg_s)
455 draw_legend_line(ctx, "Bytes transmitted (max %d)" % (max_transmitted_sample.transmitted_bytes),
456 BYTES_TRANSMITTED_COLOR, off_x+400, curr_y+20, leg_s)
457 draw_legend_box(ctx, "Bytes receive diff (max %d)" % (max_receive_diff_sample.receive_diff),
458 BYTES_RECEIVE_DIFF_COLOR, off_x+650, curr_y+20, leg_s)
459 draw_legend_box(ctx, "Bytes transmit diff (max %d)" % (max_transmit_diff_sample.transmit_diff),
460 BYTES_TRANSMIT_DIFF_COLOR, off_x+900, curr_y+20, leg_s)
461
462
463 chart_rect = (off_x, curr_y + 30, w, bar_h)
464 if clip_visible(clip, chart_rect):
465 draw_box_ticks(ctx, chart_rect, sec_w)
466 draw_annotations(ctx, proc_tree, trace.times, chart_rect)
467
468 if clip_visible (clip, chart_rect):
469 draw_chart (ctx, BYTES_RECEIVED_COLOR, False, chart_rect, \
470 [(sample.time, sample.received_bytes) for sample in samples], \
471 proc_tree, None)
472
473 draw_chart (ctx, BYTES_TRANSMITTED_COLOR, False, chart_rect, \
474 [(sample.time, sample.transmitted_bytes) for sample in samples], \
475 proc_tree, None)
476
477 if clip_visible (clip, chart_rect):
478 draw_chart (ctx, BYTES_RECEIVE_DIFF_COLOR, True, chart_rect, \
479 [(sample.time, sample.receive_diff) for sample in samples], \
480 proc_tree, None)
481
482 draw_chart (ctx, BYTES_TRANSMIT_DIFF_COLOR, True, chart_rect, \
483 [(sample.time, sample.transmit_diff) for sample in samples], \
484 proc_tree, None)
485
486 curr_y = curr_y + 30 + bar_h
487
440 # render CPU pressure chart 488 # render CPU pressure chart
441 if trace.cpu_pressure: 489 if trace.cpu_pressure:
442 max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10) 490 max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10)
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py
index 63a53b6b88..72a54c6ba5 100644
--- a/scripts/pybootchartgui/pybootchartgui/parsing.py
+++ b/scripts/pybootchartgui/pybootchartgui/parsing.py
@@ -48,6 +48,7 @@ class Trace:
48 self.filename = None 48 self.filename = None
49 self.parent_map = None 49 self.parent_map = None
50 self.mem_stats = [] 50 self.mem_stats = []
51 self.net_stats = []
51 self.monitor_disk = None 52 self.monitor_disk = None
52 self.cpu_pressure = [] 53 self.cpu_pressure = []
53 self.io_pressure = [] 54 self.io_pressure = []
@@ -457,7 +458,7 @@ def _parse_proc_disk_stat_log(file):
457 not sda1, sda2 etc. The format of relevant lines should be: 458 not sda1, sda2 etc. The format of relevant lines should be:
458 {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq} 459 {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq}
459 """ 460 """
460 disk_regex_re = re.compile ('^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') 461 disk_regex_re = re.compile (r'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
461 462
462 # this gets called an awful lot. 463 # this gets called an awful lot.
463 def is_relevant_line(linetokens): 464 def is_relevant_line(linetokens):
@@ -557,6 +558,21 @@ def _parse_monitor_disk_log(file):
557 558
558 return disk_stats 559 return disk_stats
559 560
561
562def _parse_reduced_net_log(file):
563 net_stats = {}
564 for time, lines in _parse_timed_blocks(file):
565
566 for line in lines:
567 parts = line.split()
568 iface = parts[0][:-1]
569 if iface not in net_stats:
570 net_stats[iface] = [NetSample(time, iface, int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4]))]
571 else:
572 net_stats[iface].append(NetSample(time, iface, int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4])))
573 return net_stats
574
575
560def _parse_pressure_logs(file, filename): 576def _parse_pressure_logs(file, filename):
561 """ 577 """
562 Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values 578 Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values
@@ -594,8 +610,8 @@ def _parse_pressure_logs(file, filename):
594# [ 0.039993] calling migration_init+0x0/0x6b @ 1 610# [ 0.039993] calling migration_init+0x0/0x6b @ 1
595# [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs 611# [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs
596def _parse_dmesg(writer, file): 612def _parse_dmesg(writer, file):
597 timestamp_re = re.compile ("^\[\s*(\d+\.\d+)\s*]\s+(.*)$") 613 timestamp_re = re.compile (r"^\[\s*(\d+\.\d+)\s*]\s+(.*)$")
598 split_re = re.compile ("^(\S+)\s+([\S\+_-]+) (.*)$") 614 split_re = re.compile (r"^(\S+)\s+([\S\+_-]+) (.*)$")
599 processMap = {} 615 processMap = {}
600 idx = 0 616 idx = 0
601 inc = 1.0 / 1000000 617 inc = 1.0 / 1000000
@@ -640,7 +656,7 @@ def _parse_dmesg(writer, file):
640# print "foo: '%s' '%s' '%s'" % (type, func, rest) 656# print "foo: '%s' '%s' '%s'" % (type, func, rest)
641 if type == "calling": 657 if type == "calling":
642 ppid = kernel.pid 658 ppid = kernel.pid
643 p = re.match ("\@ (\d+)", rest) 659 p = re.match (r"\@ (\d+)", rest)
644 if p is not None: 660 if p is not None:
645 ppid = float (p.group(1)) // 1000 661 ppid = float (p.group(1)) // 1000
646# print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms) 662# print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms)
@@ -742,7 +758,7 @@ def get_num_cpus(headers):
742 cpu_model = headers.get("system.cpu") 758 cpu_model = headers.get("system.cpu")
743 if cpu_model is None: 759 if cpu_model is None:
744 return 1 760 return 1
745 mat = re.match(".*\\((\\d+)\\)", cpu_model) 761 mat = re.match(r".*\((\d+)\)", cpu_model)
746 if mat is None: 762 if mat is None:
747 return 1 763 return 1
748 return max (int(mat.group(1)), 1) 764 return max (int(mat.group(1)), 1)
@@ -767,6 +783,8 @@ def _do_parse(writer, state, filename, file):
767 state.cmdline = _parse_cmdline_log(writer, file) 783 state.cmdline = _parse_cmdline_log(writer, file)
768 elif name == "monitor_disk.log": 784 elif name == "monitor_disk.log":
769 state.monitor_disk = _parse_monitor_disk_log(file) 785 state.monitor_disk = _parse_monitor_disk_log(file)
786 elif name == "reduced_proc_net.log":
787 state.net_stats = _parse_reduced_net_log(file)
770 #pressure logs are in a subdirectory 788 #pressure logs are in a subdirectory
771 elif name == "cpu.log": 789 elif name == "cpu.log":
772 state.cpu_pressure = _parse_pressure_logs(file, name) 790 state.cpu_pressure = _parse_pressure_logs(file, name)
diff --git a/scripts/pybootchartgui/pybootchartgui/samples.py b/scripts/pybootchartgui/pybootchartgui/samples.py
index a70d8a5a28..7c92d2ce6a 100644
--- a/scripts/pybootchartgui/pybootchartgui/samples.py
+++ b/scripts/pybootchartgui/pybootchartgui/samples.py
@@ -37,6 +37,16 @@ class CPUSample:
37 return str(self.time) + "\t" + str(self.user) + "\t" + \ 37 return str(self.time) + "\t" + str(self.user) + "\t" + \
38 str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) 38 str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap)
39 39
40
41class NetSample:
42 def __init__(self, time, iface, received_bytes, transmitted_bytes, receive_diff, transmit_diff):
43 self.time = time
44 self.iface = iface
45 self.received_bytes = received_bytes
46 self.transmitted_bytes = transmitted_bytes
47 self.receive_diff = receive_diff
48 self.transmit_diff = transmit_diff
49
40class CPUPressureSample: 50class CPUPressureSample:
41 def __init__(self, time, avg10, avg60, avg300, deltaTotal): 51 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
42 self.time = time 52 self.time = time
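
_parse_reduced_net_log above strips a trailing colon from the first token and reads four integer counters, so the reduced_proc_net.log blocks it consumes presumably carry /proc/net/dev-style interface lines. A minimal sketch of the per-line handling (the sample line is invented for illustration):

    line = "eth0: 10240 2048 512 128"       # made-up counters
    parts = line.split()
    iface = parts[0][:-1]                   # "eth0", trailing ':' dropped
    rx, tx, rx_diff, tx_diff = (int(p) for p in parts[1:5])
    # these feed NetSample(time, iface, received_bytes,
    #                      transmitted_bytes, receive_diff, transmit_diff)
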
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py
index 8a728720ba..9e01c09cb0 100755
--- a/scripts/relocate_sdk.py
+++ b/scripts/relocate_sdk.py
@@ -49,6 +49,34 @@ def get_arch():
49 elif ei_class == 2: 49 elif ei_class == 2:
50 return 64 50 return 64
51 51
52def get_dl_arch(dl_path):
53 try:
54 with open(dl_path, "r+b") as f:
55 e_ident =f.read(16)
56 except IOError:
57 exctype, ioex = sys.exc_info()[:2]
58 if ioex.errno == errno.ETXTBSY:
59 print("Could not open %s. File used by another process.\nPlease "\
60 "make sure you exit all processes that might use any SDK "\
61 "binaries." % e)
62 else:
63 print("Could not open %s: %s(%d)" % (e, ioex.strerror, ioex.errno))
64 sys.exit(-1)
65
66 ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident)
67
68 if ei_mag0 != 0x7f or ei_mag1_3 != b"ELF" or ei_class == 0:
69 print("ERROR: unknow %s" % dl_path)
70 sys.exit(-1)
71
72 if ei_class == 1:
73 arch = 32
74 elif ei_class == 2:
75 arch = 64
76
77 return arch
78
79
52def parse_elf_header(): 80def parse_elf_header():
53 global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ 81 global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\
54 e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx 82 e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx
@@ -223,6 +251,8 @@ else:
223 251
224executables_list = sys.argv[3:] 252executables_list = sys.argv[3:]
225 253
254dl_arch = get_dl_arch(new_dl_path)
255
226errors = False 256errors = False
227for e in executables_list: 257for e in executables_list:
228 perms = os.stat(e)[stat.ST_MODE] 258 perms = os.stat(e)[stat.ST_MODE]
@@ -247,7 +277,7 @@ for e in executables_list:
247 old_size = os.path.getsize(e) 277 old_size = os.path.getsize(e)
248 if old_size >= 64: 278 if old_size >= 64:
249 arch = get_arch() 279 arch = get_arch()
250 if arch: 280 if arch and arch == dl_arch:
251 parse_elf_header() 281 parse_elf_header()
252 if not change_interpreter(e): 282 if not change_interpreter(e):
253 errors = True 283 errors = True
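
The e_ident probe that get_dl_arch performs can be tried standalone; the sketch below inspects the running Python interpreter as an arbitrary ELF binary (a Linux host is assumed):

    import struct
    import sys

    # read the 16-byte ELF identification block, as get_dl_arch does
    with open(sys.executable, "rb") as f:
        e_ident = f.read(16)
    ei_mag0, ei_mag1_3, ei_class, ei_data, ei_version = struct.unpack("<B3sBBB9x", e_ident)
    # ei_class 1 is a 32-bit ELF, 2 is 64-bit, matching the arch values above
    print({1: 32, 2: 64}.get(ei_class, "not a recognised ELF class"))
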
diff --git a/scripts/resulttool b/scripts/resulttool
index fc282bda6c..66a6af9959 100755
--- a/scripts/resulttool
+++ b/scripts/resulttool
@@ -15,6 +15,9 @@
15# To report test report, execute the below 15# To report test report, execute the below
16# $ resulttool report <source_dir> 16# $ resulttool report <source_dir>
17# 17#
18# To create a unit test report in JUnit XML format, execute the below
19# $ resulttool junit <json_file>
20#
18# To perform regression file analysis, execute the below 21# To perform regression file analysis, execute the below
19# $ resulttool regression-file <base_result_file> <target_result_file> 22# $ resulttool regression-file <base_result_file> <target_result_file>
20# 23#
@@ -43,6 +46,7 @@ import resulttool.regression
43import resulttool.report 46import resulttool.report
44import resulttool.manualexecution 47import resulttool.manualexecution
45import resulttool.log 48import resulttool.log
49import resulttool.junit
46logger = scriptutils.logger_create('resulttool') 50logger = scriptutils.logger_create('resulttool')
47 51
48def main(): 52def main():
@@ -61,6 +65,7 @@ def main():
61 resulttool.regression.register_commands(subparsers) 65 resulttool.regression.register_commands(subparsers)
62 resulttool.report.register_commands(subparsers) 66 resulttool.report.register_commands(subparsers)
63 resulttool.log.register_commands(subparsers) 67 resulttool.log.register_commands(subparsers)
68 resulttool.junit.register_commands(subparsers)
64 69
65 args = parser.parse_args() 70 args = parser.parse_args()
66 if args.debug: 71 if args.debug:
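
The new junit module plugs into resulttool through the same register_commands(subparsers) convention used by the other modules above; a sketch of what such a registration typically looks like (names and signatures here are assumptions for illustration, not the actual resulttool/junit.py code):

    # hypothetical registration following the resulttool subcommand convention
    def junit(args, logger):
        pass  # would convert the testresults json at args.json_file to JUnit XML

    def register_commands(subparsers):
        parser = subparsers.add_parser('junit', help='create a JUnit XML report')
        parser.add_argument('json_file', help='testresults json file to convert')
        parser.set_defaults(func=junit)
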
diff --git a/scripts/runqemu b/scripts/runqemu
index 69cd44864e..3d77046972 100755
--- a/scripts/runqemu
+++ b/scripts/runqemu
@@ -468,9 +468,11 @@ class BaseConfig(object):
468 self.set("IMAGE_LINK_NAME", image_link_name) 468 self.set("IMAGE_LINK_NAME", image_link_name)
469 logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name) 469 logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
470 470
471 def set_dri_path(self): 471 def set_mesa_paths(self):
472 drivers_path = os.path.join(self.bindir_native, '../lib/dri') 472 drivers_path = os.path.join(self.bindir_native, '../lib/dri')
473 if not os.path.exists(drivers_path) or not os.listdir(drivers_path): 473 gbm_path = os.path.join(self.bindir_native, '../lib/gbm')
474 if not os.path.exists(drivers_path) or not os.listdir(drivers_path) \
475 or not os.path.exists(gbm_path) or not os.listdir(gbm_path):
474 raise RunQemuError(""" 476 raise RunQemuError("""
475qemu has been built without opengl support and accelerated graphics support is not available. 477qemu has been built without opengl support and accelerated graphics support is not available.
476To enable it, add: 478To enable it, add:
@@ -479,6 +481,7 @@ DISTRO_FEATURES_NATIVESDK:append = " opengl"
479to your build configuration. 481to your build configuration.
480""") 482""")
481 self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path 483 self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path
484 self.qemu_environ['GBM_BACKENDS_PATH'] = gbm_path
482 485
483 def check_args(self): 486 def check_args(self):
484 for debug in ("-d", "--debug"): 487 for debug in ("-d", "--debug"):
@@ -1192,19 +1195,22 @@ to your build configuration.
1192 raise RunQemuError("a new one with sudo.") 1195 raise RunQemuError("a new one with sudo.")
1193 1196
1194 gid = os.getgid() 1197 gid = os.getgid()
1195 uid = os.getuid()
1196 logger.info("Setting up tap interface under sudo") 1198 logger.info("Setting up tap interface under sudo")
1197 cmd = ('sudo', self.qemuifup, str(gid)) 1199 cmd = ('sudo', self.qemuifup, str(gid))
1198 try: 1200 for _ in range(5):
1199 tap = subprocess.check_output(cmd).decode('utf-8').strip() 1201 try:
1200 except subprocess.CalledProcessError as e: 1202 tap = subprocess.check_output(cmd).decode('utf-8').strip()
1201 logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e)) 1203 except subprocess.CalledProcessError as e:
1202 sys.exit(1) 1204 logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e))
1203 lockfile = os.path.join(lockdir, tap) 1205 sys.exit(1)
1204 self.taplock = lockfile + '.lock' 1206 lockfile = os.path.join(lockdir, tap)
1205 self.acquire_taplock() 1207 self.taplock = lockfile + '.lock'
1206 self.cleantap = True 1208 if self.acquire_taplock():
1207 logger.debug('Created tap: %s' % tap) 1209 self.cleantap = True
1210 logger.debug('Created tap: %s' % tap)
1211 break
1212 else:
1213 tap = None
1208 1214
1209 if not tap: 1215 if not tap:
1210 logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") 1216 logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.")
@@ -1295,6 +1301,10 @@ to your build configuration.
1295 elif drive_type.startswith("/dev/hd"): 1301 elif drive_type.startswith("/dev/hd"):
1296 logger.info('Using ide drive') 1302 logger.info('Using ide drive')
1297 vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format) 1303 vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format)
1304 elif drive_type.startswith("/dev/mmcblk"):
1305 logger.info('Using sdcard drive')
1306 vm_drive = '-drive id=sdcard0,if=none,file=%s,format=%s -device sdhci-pci -device sd-card,drive=sdcard0' \
1307 % (self.rootfs, rootfs_format)
1298 elif drive_type.startswith("/dev/vdb"): 1308 elif drive_type.startswith("/dev/vdb"):
1299 logger.info('Using block virtio drive'); 1309 logger.info('Using block virtio drive');
1300 vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \ 1310 vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \
@@ -1454,7 +1464,7 @@ to your build configuration.
1454 self.qemu_opt += ' -display ' 1464 self.qemu_opt += ' -display '
1455 if self.egl_headless == True: 1465 if self.egl_headless == True:
1456 self.check_render_nodes() 1466 self.check_render_nodes()
1457 self.set_dri_path() 1467 self.set_mesa_paths()
1458 self.qemu_opt += 'egl-headless,' 1468 self.qemu_opt += 'egl-headless,'
1459 else: 1469 else:
1460 if self.sdl == True: 1470 if self.sdl == True:
@@ -1464,10 +1474,10 @@ to your build configuration.
1464 self.qemu_opt += 'gtk,' 1474 self.qemu_opt += 'gtk,'
1465 1475
1466 if self.gl == True: 1476 if self.gl == True:
1467 self.set_dri_path() 1477 self.set_mesa_paths()
1468 self.qemu_opt += 'gl=on,' 1478 self.qemu_opt += 'gl=on,'
1469 elif self.gl_es == True: 1479 elif self.gl_es == True:
1470 self.set_dri_path() 1480 self.set_mesa_paths()
1471 self.qemu_opt += 'gl=es,' 1481 self.qemu_opt += 'gl=es,'
1472 self.qemu_opt += 'show-cursor=on' 1482 self.qemu_opt += 'show-cursor=on'
1473 1483
@@ -1483,7 +1493,7 @@ to your build configuration.
1483 # If no serial or serialtcp options were specified, only ttyS0 is created 1493 # If no serial or serialtcp options were specified, only ttyS0 is created
1484 # and sysvinit shows an error trying to enable ttyS1: 1494 # and sysvinit shows an error trying to enable ttyS1:
1485 # INIT: Id "S1" respawning too fast: disabled for 5 minutes 1495 # INIT: Id "S1" respawning too fast: disabled for 5 minutes
1486 serial_num = len(re.findall("-serial", self.qemu_opt)) 1496 serial_num = len(re.findall("(^| )-serial ", self.qemu_opt))
1487 1497
1488 # Assume if the user passed serial options, they know what they want 1498 # Assume if the user passed serial options, they know what they want
1489 # and pad to two devices 1499 # and pad to two devices
@@ -1503,7 +1513,7 @@ to your build configuration.
1503 1513
1504 self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") 1514 self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT")
1505 1515
1506 serial_num = len(re.findall("-serial", self.qemu_opt)) 1516 serial_num = len(re.findall("(^| )-serial ", self.qemu_opt))
1507 if serial_num < 2: 1517 if serial_num < 2:
1508 self.qemu_opt += " -serial null" 1518 self.qemu_opt += " -serial null"
1509 1519
@@ -1669,6 +1679,9 @@ to your build configuration.
1669 if multiconfig: 1679 if multiconfig:
1670 multiconfig = "mc:%s" % multiconfig 1680 multiconfig = "mc:%s" % multiconfig
1671 1681
1682 if self.rootfs and not target:
1683 target = self.rootfs
1684
1672 if mach: 1685 if mach:
1673 cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target) 1686 cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
1674 else: 1687 else:
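
The tightened serial-counting regex above only matches a standalone -serial switch, so option names that merely contain the substring no longer inflate the count; a quick illustration (the option string is contrived):

    import re

    opts = "-device virtio-serial -serial mon:stdio"
    print(len(re.findall("-serial", opts)))        # 2: also hits 'virtio-serial'
    print(len(re.findall("(^| )-serial ", opts)))  # 1: only the real switch
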
diff --git a/scripts/send-error-report b/scripts/send-error-report
index cfbcaa52cb..cc1bc7c2b1 100755
--- a/scripts/send-error-report
+++ b/scripts/send-error-report
@@ -6,6 +6,7 @@
6# Copyright (C) 2013 Intel Corporation 6# Copyright (C) 2013 Intel Corporation
7# Author: Andreea Proca <andreea.b.proca@intel.com> 7# Author: Andreea Proca <andreea.b.proca@intel.com>
8# Author: Michael Wood <michael.g.wood@intel.com> 8# Author: Michael Wood <michael.g.wood@intel.com>
9# Author: Thomas Perrot <thomas.perrot@bootlin.com>
9# 10#
10# SPDX-License-Identifier: GPL-2.0-only 11# SPDX-License-Identifier: GPL-2.0-only
11# 12#
@@ -22,7 +23,7 @@ scripts_lib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'li
22sys.path.insert(0, scripts_lib_path) 23sys.path.insert(0, scripts_lib_path)
23import argparse_oe 24import argparse_oe
24 25
25version = "0.3" 26version = "0.4"
26 27
27log = logging.getLogger("send-error-report") 28log = logging.getLogger("send-error-report")
28logging.basicConfig(format='%(levelname)s: %(message)s') 29logging.basicConfig(format='%(levelname)s: %(message)s')
@@ -65,7 +66,7 @@ def edit_content(json_file_path):
65 66
66def prepare_data(args): 67def prepare_data(args):
67 # attempt to get the max_log_size from the server's settings 68 # attempt to get the max_log_size from the server's settings
68 max_log_size = getPayloadLimit(args.protocol+args.server+"/ClientPost/JSON") 69 max_log_size = getPayloadLimit(args.server+"/ClientPost/JSON")
69 70
70 if not os.path.isfile(args.error_file): 71 if not os.path.isfile(args.error_file):
71 log.error("No data file found.") 72 log.error("No data file found.")
@@ -135,19 +136,38 @@ def send_data(data, args):
135 headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version} 136 headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version}
136 137
137 if args.json: 138 if args.json:
138 url = args.protocol+args.server+"/ClientPost/JSON/" 139 url = args.server+"/ClientPost/JSON/"
139 else: 140 else:
140 url = args.protocol+args.server+"/ClientPost/" 141 url = args.server+"/ClientPost/"
141 142
142 req = urllib.request.Request(url, data=data, headers=headers) 143 req = urllib.request.Request(url, data=data, headers=headers)
144
145 log.debug(f"Request URL: {url}")
146 log.debug(f"Request Headers: {headers}")
147 log.debug(f"Request Data: {data.decode('utf-8')}")
148
143 try: 149 try:
144 response = urllib.request.urlopen(req) 150 response = urllib.request.urlopen(req)
145 except urllib.error.HTTPError as e: 151 except urllib.error.HTTPError as e:
146 logging.error(str(e)) 152 log.error(f"HTTP Error {e.code}: {e.reason}")
153 log.debug(f"Response Content: {e.read().decode('utf-8')}")
147 sys.exit(1) 154 sys.exit(1)
148 155
156 log.debug(f"Response Status: {response.status}")
157 log.debug(f"Response Headers: {response.getheaders()}")
149 print(response.read().decode('utf-8')) 158 print(response.read().decode('utf-8'))
150 159
160def validate_server_url(args):
161 # Get the error report server from an argument
162 server = args.server or 'https://errors.yoctoproject.org'
163
164 if not server.startswith('http://') and not server.startswith('https://'):
165 log.error("Missing a URL scheme either http:// or https:// in the server name: " + server)
166 sys.exit(1)
167
168 # Return the validated server URL
169 return server
170
151 171
152if __name__ == '__main__': 172if __name__ == '__main__':
153 arg_parse = argparse_oe.ArgumentParser(description="This script will send an error report to your specified error-report-web server.") 173 arg_parse = argparse_oe.ArgumentParser(description="This script will send an error report to your specified error-report-web server.")
@@ -164,8 +184,7 @@ if __name__ == '__main__':
164 arg_parse.add_argument("-s", 184 arg_parse.add_argument("-s",
165 "--server", 185 "--server",
166 help="Server to send error report to", 186 help="Server to send error report to",
167 type=str, 187 type=str)
168 default="errors.yoctoproject.org")
169 188
170 arg_parse.add_argument("-e", 189 arg_parse.add_argument("-e",
171 "--email", 190 "--email",
@@ -190,18 +209,22 @@ if __name__ == '__main__':
190 help="Return the result in json format, silences all other output", 209 help="Return the result in json format, silences all other output",
191 action="store_true") 210 action="store_true")
192 211
193 arg_parse.add_argument("--no-ssl", 212 arg_parse.add_argument("-d",
194 help="Use http instead of https protocol", 213 "--debug",
195 dest="protocol", 214 help="Enable debug mode to print request/response details",
196 action="store_const", const="http://", default="https://") 215 action="store_true")
197
198
199 216
200 args = arg_parse.parse_args() 217 args = arg_parse.parse_args()
201 218
219 args.server = validate_server_url(args)
220
202 if (args.json == False): 221 if (args.json == False):
203 print("Preparing to send errors to: "+args.server) 222 print("Preparing to send errors to: "+args.server)
204 223
224 # Enable debugging if requested
225 if args.debug:
226 log.setLevel(logging.DEBUG)
227
205 data = prepare_data(args) 228 data = prepare_data(args)
206 send_data(data, args) 229 send_data(data, args)
207 230
diff --git a/scripts/sstate-cache-management.py b/scripts/sstate-cache-management.py
index d3f600bd28..303b8f13a3 100755
--- a/scripts/sstate-cache-management.py
+++ b/scripts/sstate-cache-management.py
@@ -268,6 +268,10 @@ def parse_arguments():
268 # ) 268 # )
269 269
270 parser.add_argument( 270 parser.add_argument(
271 "-n", "--dry-run", action="store_true", help="Don't execute, just go through the motions."
272 )
273
274 parser.add_argument(
271 "-y", 275 "-y",
272 "--yes", 276 "--yes",
273 action="store_true", 277 action="store_true",
@@ -314,6 +318,9 @@ def main():
314 if args.debug >= 1: 318 if args.debug >= 1:
315 print("\n".join([str(p.path) for p in remove])) 319 print("\n".join([str(p.path) for p in remove]))
316 print(f"{len(remove)} out of {len(paths)} files will be removed!") 320 print(f"{len(remove)} out of {len(paths)} files will be removed!")
321 if args.dry_run:
322 return 0
323
317 if not args.yes: 324 if not args.yes:
318 print("Do you want to continue (y/n)?") 325 print("Do you want to continue (y/n)?")
319 confirm = input() in ("y", "Y") 326 confirm = input() in ("y", "Y")
diff --git a/scripts/test-remote-image b/scripts/test-remote-image
index d209d22854..1d018992b0 100755
--- a/scripts/test-remote-image
+++ b/scripts/test-remote-image
@@ -152,8 +152,7 @@ class AutoTargetProfile(BaseTargetProfile):
152 return controller 152 return controller
153 153
154 def set_kernel_file(self): 154 def set_kernel_file(self):
155 postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" 155 machine = get_bb_var('MACHINE')
156 machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig)
157 self.kernel_file = self.kernel_type + '-' + machine + '.bin' 156 self.kernel_file = self.kernel_type + '-' + machine + '.bin'
158 157
159 def set_rootfs_file(self): 158 def set_rootfs_file(self):
@@ -215,13 +214,11 @@ class PublicAB(BaseRepoProfile):
215 def get_repo_path(self): 214 def get_repo_path(self):
216 path = '/machines/' 215 path = '/machines/'
217 216
218 postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" 217 machine = get_bb_var('MACHINE')
219 machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig)
220 if 'qemu' in machine: 218 if 'qemu' in machine:
221 path += 'qemu/' 219 path += 'qemu/'
222 220
223 postconfig = "QA_GET_DISTRO = \"${DISTRO}\"" 221 distro = get_bb_var('DISTRO')
224 distro = get_bb_var('QA_GET_DISTRO', postconfig=postconfig)
225 path += distro.replace('poky', machine) + '/' 222 path += distro.replace('poky', machine) + '/'
226 return path 223 return path
227 224
diff --git a/scripts/wic b/scripts/wic
index 06e0b48db0..9137208f5e 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -237,6 +237,13 @@ def wic_ls_subcommand(args, usage_str):
237 Command-line handling for list content of images. 237 Command-line handling for list content of images.
238 The real work is done by engine.wic_ls() 238 The real work is done by engine.wic_ls()
239 """ 239 """
240
241 if args.image_name:
242 BB_VARS.default_image = args.image_name
243
244 if args.vars_dir:
245 BB_VARS.vars_dir = args.vars_dir
246
240 engine.wic_ls(args, args.native_sysroot) 247 engine.wic_ls(args, args.native_sysroot)
241 248
242def wic_cp_subcommand(args, usage_str): 249def wic_cp_subcommand(args, usage_str):
@@ -244,6 +251,12 @@ def wic_cp_subcommand(args, usage_str):
244 Command-line handling for copying files/dirs to images. 251 Command-line handling for copying files/dirs to images.
245 The real work is done by engine.wic_cp() 252 The real work is done by engine.wic_cp()
246 """ 253 """
254 if args.image_name:
255 BB_VARS.default_image = args.image_name
256
257 if args.vars_dir:
258 BB_VARS.vars_dir = args.vars_dir
259
247 engine.wic_cp(args, args.native_sysroot) 260 engine.wic_cp(args, args.native_sysroot)
248 261
249def wic_rm_subcommand(args, usage_str): 262def wic_rm_subcommand(args, usage_str):
@@ -251,6 +264,12 @@ def wic_rm_subcommand(args, usage_str):
251 Command-line handling for removing files/dirs from images. 264 Command-line handling for removing files/dirs from images.
252 The real work is done by engine.wic_rm() 265 The real work is done by engine.wic_rm()
253 """ 266 """
267 if args.image_name:
268 BB_VARS.default_image = args.image_name
269
270 if args.vars_dir:
271 BB_VARS.vars_dir = args.vars_dir
272
254 engine.wic_rm(args, args.native_sysroot) 273 engine.wic_rm(args, args.native_sysroot)
255 274
256def wic_write_subcommand(args, usage_str): 275def wic_write_subcommand(args, usage_str):
@@ -258,6 +277,12 @@ def wic_write_subcommand(args, usage_str):
258 Command-line handling for writing images. 277 Command-line handling for writing images.
259 The real work is done by engine.wic_write() 278 The real work is done by engine.wic_write()
260 """ 279 """
280 if args.image_name:
281 BB_VARS.default_image = args.image_name
282
283 if args.vars_dir:
284 BB_VARS.vars_dir = args.vars_dir
285
261 engine.wic_write(args, args.native_sysroot) 286 engine.wic_write(args, args.native_sysroot)
262 287
263def wic_help_subcommand(args, usage_str): 288def wic_help_subcommand(args, usage_str):
@@ -390,6 +415,12 @@ def wic_init_parser_ls(subparser):
390 help="image spec: <image>[:<vfat partition>[<path>]]") 415 help="image spec: <image>[:<vfat partition>[<path>]]")
391 subparser.add_argument("-n", "--native-sysroot", 416 subparser.add_argument("-n", "--native-sysroot",
392 help="path to the native sysroot containing the tools") 417 help="path to the native sysroot containing the tools")
418 subparser.add_argument("-e", "--image-name", dest="image_name",
419 help="name of the image to use the artifacts from "
420 "e.g. core-image-sato")
421 subparser.add_argument("-v", "--vars", dest='vars_dir',
422 help="directory with <image>.env files that store "
423 "bitbake variables")
393 424
394def imgpathtype(arg): 425def imgpathtype(arg):
395 img = imgtype(arg) 426 img = imgtype(arg)
@@ -404,6 +435,12 @@ def wic_init_parser_cp(subparser):
404 help="image spec: <image>:<vfat partition>[<path>] or <file>") 435 help="image spec: <image>:<vfat partition>[<path>] or <file>")
405 subparser.add_argument("-n", "--native-sysroot", 436 subparser.add_argument("-n", "--native-sysroot",
406 help="path to the native sysroot containing the tools") 437 help="path to the native sysroot containing the tools")
438 subparser.add_argument("-e", "--image-name", dest="image_name",
439 help="name of the image to use the artifacts from "
440 "e.g. core-image-sato")
441 subparser.add_argument("-v", "--vars", dest='vars_dir',
442 help="directory with <image>.env files that store "
443 "bitbake variables")
407 444
408def wic_init_parser_rm(subparser): 445def wic_init_parser_rm(subparser):
409 subparser.add_argument("path", type=imgpathtype, 446 subparser.add_argument("path", type=imgpathtype,
@@ -413,6 +450,12 @@ def wic_init_parser_rm(subparser):
413 subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, 450 subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False,
414 help="remove directories and their contents recursively, " 451 help="remove directories and their contents recursively, "
415 " this only applies to ext* partition") 452 " this only applies to ext* partition")
453 subparser.add_argument("-e", "--image-name", dest="image_name",
454 help="name of the image to use the artifacts from "
455 "e.g. core-image-sato")
456 subparser.add_argument("-v", "--vars", dest='vars_dir',
457 help="directory with <image>.env files that store "
458 "bitbake variables")
416 459
417def expandtype(rules): 460def expandtype(rules):
418 """ 461 """
@@ -454,6 +497,12 @@ def wic_init_parser_write(subparser):
454 help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") 497 help="expand rules: auto or <partition>:<size>[,<partition>:<size>]")
455 subparser.add_argument("-n", "--native-sysroot", 498 subparser.add_argument("-n", "--native-sysroot",
456 help="path to the native sysroot containing the tools") 499 help="path to the native sysroot containing the tools")
500 subparser.add_argument("--image-name", dest="image_name",
501 help="name of the image to use the artifacts from "
502 "e.g. core-image-sato")
503 subparser.add_argument("-v", "--vars", dest='vars_dir',
504 help="directory with <image>.env files that store "
505 "bitbake variables")
457 506
458def wic_init_parser_help(subparser): 507def wic_init_parser_help(subparser):
459 helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) 508 helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage)
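
All four wic subcommand handlers above seed the shared BB_VARS helper identically before delegating to the engine; factored out, the pattern is simply (a sketch reusing the attribute names from the hunks above):

    # the common pre-engine setup repeated in wic_ls/cp/rm/write
    def seed_bb_vars(args, bb_vars):
        if args.image_name:
            bb_vars.default_image = args.image_name   # e.g. core-image-sato
        if args.vars_dir:
            bb_vars.vars_dir = args.vars_dir          # dir with <image>.env files
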