Diffstat (limited to 'scripts')
-rw-r--r-- scripts/.oe-layers.json | 7
-rwxr-xr-x scripts/autobuilder-worker-prereq-tests | 23
-rwxr-xr-x scripts/b4-wrapper-poky.py | 185
-rwxr-xr-x scripts/bblock | 184
-rwxr-xr-x scripts/bitbake-prserv-tool | 47
-rwxr-xr-x scripts/bitbake-whatchanged | 320
-rwxr-xr-x scripts/buildhistory-diff | 5
-rwxr-xr-x scripts/buildstats-diff | 28
-rwxr-xr-x scripts/buildstats-summary | 140
-rwxr-xr-x scripts/clean-hashserver-database | 77
-rwxr-xr-x scripts/combo-layer | 38
-rwxr-xr-x scripts/combo-layer-hook-default.sh | 2
-rwxr-xr-x scripts/contrib/bb-perf/bb-matrix-plot.sh | 4
-rwxr-xr-x scripts/contrib/bbvars.py | 6
-rwxr-xr-x scripts/contrib/build-perf-test-wrapper.sh | 15
-rwxr-xr-x scripts/contrib/convert-overrides.py | 111
-rwxr-xr-x scripts/contrib/convert-spdx-licenses.py | 145
-rwxr-xr-x scripts/contrib/convert-variable-renames.py | 116
-rwxr-xr-x scripts/contrib/ddimage | 2
-rwxr-xr-x scripts/contrib/dialog-power-control | 2
-rwxr-xr-x scripts/contrib/documentation-audit.sh | 6
-rwxr-xr-x scripts/contrib/image-manifest | 2
-rwxr-xr-x scripts/contrib/improve_kernel_cve_report.py | 467
-rwxr-xr-x scripts/contrib/make-spdx-bindings.sh | 12
-rwxr-xr-x scripts/contrib/oe-build-perf-report-email.py | 167
-rw-r--r-- scripts/contrib/oe-image-files-spdx/.gitignore | 8
-rw-r--r-- scripts/contrib/oe-image-files-spdx/README.md | 24
-rw-r--r-- scripts/contrib/oe-image-files-spdx/pyproject.toml | 23
-rw-r--r-- scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py | 1
-rw-r--r-- scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py | 86
-rw-r--r-- scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py | 1
-rwxr-xr-x scripts/contrib/patchreview.py | 82
-rwxr-xr-x scripts/contrib/test_build_time.sh | 2
-rwxr-xr-x scripts/contrib/test_build_time_worker.sh | 2
-rwxr-xr-x scripts/contrib/verify-homepage.py | 2
-rwxr-xr-x scripts/cp-noerror | 2
-rwxr-xr-x scripts/create-pull-request | 9
-rwxr-xr-x scripts/crosstap | 2
-rwxr-xr-x scripts/cve-json-to-text.py | 146
-rwxr-xr-x scripts/devtool | 108
l--------- scripts/esdk-tools/devtool | 1
l--------- scripts/esdk-tools/oe-find-native-sysroot | 1
l--------- scripts/esdk-tools/recipetool | 1
l--------- scripts/esdk-tools/runqemu | 1
l--------- scripts/esdk-tools/runqemu-addptable2image | 1
l--------- scripts/esdk-tools/runqemu-export-rootfs | 1
l--------- scripts/esdk-tools/runqemu-extract-sdk | 1
l--------- scripts/esdk-tools/runqemu-gen-tapdevs | 1
l--------- scripts/esdk-tools/runqemu-ifdown | 1
l--------- scripts/esdk-tools/runqemu-ifup | 1
l--------- scripts/esdk-tools/wic | 1
-rwxr-xr-x scripts/gen-lockedsig-cache | 3
-rwxr-xr-x scripts/gen-site-config | 43
-rwxr-xr-x scripts/git | 30
-rwxr-xr-x scripts/install-buildtools | 83
-rw-r--r-- scripts/lib/argparse_oe.py | 2
-rw-r--r-- scripts/lib/build_perf/html/measurement_chart.html | 214
-rw-r--r-- scripts/lib/build_perf/html/report.html | 195
-rw-r--r-- scripts/lib/build_perf/report.py | 5
-rw-r--r-- scripts/lib/buildstats.py | 38
-rw-r--r-- scripts/lib/checklayer/__init__.py | 59
-rw-r--r-- scripts/lib/checklayer/cases/bsp.py | 4
-rw-r--r-- scripts/lib/checklayer/cases/common.py | 70
-rw-r--r-- scripts/lib/checklayer/cases/distro.py | 2
-rw-r--r-- scripts/lib/devtool/__init__.py | 29
-rw-r--r-- scripts/lib/devtool/build.py | 2
-rw-r--r-- scripts/lib/devtool/build_sdk.py | 9
-rw-r--r-- scripts/lib/devtool/deploy.py | 240
-rw-r--r-- scripts/lib/devtool/ide_plugins/__init__.py | 282
-rw-r--r-- scripts/lib/devtool/ide_plugins/ide_code.py | 462
-rw-r--r-- scripts/lib/devtool/ide_plugins/ide_none.py | 53
-rwxr-xr-x scripts/lib/devtool/ide_sdk.py | 1009
-rw-r--r-- scripts/lib/devtool/menuconfig.py | 11
-rw-r--r-- scripts/lib/devtool/sdk.py | 5
-rw-r--r-- scripts/lib/devtool/standard.py | 751
-rw-r--r-- scripts/lib/devtool/upgrade.py | 246
-rw-r--r-- scripts/lib/devtool/utilcmds.py | 2
-rw-r--r-- scripts/lib/recipetool/append.py | 80
-rw-r--r-- scripts/lib/recipetool/create.py | 369
-rw-r--r-- scripts/lib/recipetool/create_buildsys.py | 40
-rw-r--r-- scripts/lib/recipetool/create_buildsys_python.py | 1090
-rw-r--r-- scripts/lib/recipetool/create_go.py | 777
-rw-r--r-- scripts/lib/recipetool/create_npm.py | 139
-rw-r--r-- scripts/lib/recipetool/licenses.csv | 37
-rw-r--r-- scripts/lib/recipetool/setvar.py | 1
-rw-r--r-- scripts/lib/resulttool/junit.py | 77
-rw-r--r-- scripts/lib/resulttool/log.py | 13
-rwxr-xr-x scripts/lib/resulttool/manualexecution.py | 2
-rw-r--r-- scripts/lib/resulttool/regression.py | 284
-rw-r--r-- scripts/lib/resulttool/report.py | 7
-rw-r--r-- scripts/lib/resulttool/resultutils.py | 84
-rw-r--r-- scripts/lib/resulttool/store.py | 27
-rw-r--r-- scripts/lib/scriptutils.py | 40
-rw-r--r-- scripts/lib/wic/canned-wks/common.wks.inc | 2
-rw-r--r-- scripts/lib/wic/canned-wks/directdisk-gpt.wks | 2
-rw-r--r-- scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks | 2
-rw-r--r-- scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | 2
-rw-r--r-- scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in | 3
-rw-r--r-- scripts/lib/wic/canned-wks/mkefidisk.wks | 4
-rw-r--r-- scripts/lib/wic/canned-wks/mkhybridiso.wks | 2
-rw-r--r-- scripts/lib/wic/canned-wks/qemuloongarch.wks | 3
-rw-r--r-- scripts/lib/wic/canned-wks/qemux86-directdisk.wks | 2
-rw-r--r-- scripts/lib/wic/canned-wks/sdimage-bootpart.wks | 2
-rw-r--r-- scripts/lib/wic/canned-wks/systemd-bootdisk.wks | 2
-rw-r--r-- scripts/lib/wic/engine.py | 22
-rw-r--r-- scripts/lib/wic/filemap.py | 7
-rw-r--r-- scripts/lib/wic/help.py | 50
-rw-r--r-- scripts/lib/wic/ksparser.py | 7
-rw-r--r-- scripts/lib/wic/misc.py | 10
-rw-r--r-- scripts/lib/wic/partition.py | 76
-rw-r--r-- scripts/lib/wic/pluginbase.py | 10
-rw-r--r-- scripts/lib/wic/plugins/imager/direct.py | 167
-rw-r--r-- scripts/lib/wic/plugins/source/bootimg_biosplusefi.py (renamed from scripts/lib/wic/plugins/source/bootimg-biosplusefi.py) | 30
-rw-r--r-- scripts/lib/wic/plugins/source/bootimg_efi.py (renamed from scripts/lib/wic/plugins/source/bootimg-efi.py) | 225
-rw-r--r-- scripts/lib/wic/plugins/source/bootimg_partition.py (renamed from scripts/lib/wic/plugins/source/bootimg-partition.py) | 52
-rw-r--r-- scripts/lib/wic/plugins/source/bootimg_pcbios.py (renamed from scripts/lib/wic/plugins/source/bootimg-pcbios.py) | 10
-rw-r--r-- scripts/lib/wic/plugins/source/empty.py | 59
-rw-r--r-- scripts/lib/wic/plugins/source/isoimage_isohybrid.py (renamed from scripts/lib/wic/plugins/source/isoimage-isohybrid.py) | 12
-rw-r--r-- scripts/lib/wic/plugins/source/rawcopy.py | 36
-rw-r--r-- scripts/lib/wic/plugins/source/rootfs.py | 13
-rwxr-xr-x scripts/lz4c | 26
-rwxr-xr-x scripts/makefile-getvar | 24
-rwxr-xr-x scripts/nativesdk-intercept/chgrp | 5
-rwxr-xr-x scripts/nativesdk-intercept/chown | 5
-rwxr-xr-x scripts/oe-build-perf-report | 12
-rwxr-xr-x scripts/oe-buildenv-internal | 31
-rwxr-xr-x scripts/oe-check-sstate | 12
-rwxr-xr-x scripts/oe-debuginfod | 19
-rwxr-xr-x scripts/oe-depends-dot | 34
-rwxr-xr-x scripts/oe-find-native-sysroot | 15
-rwxr-xr-x scripts/oe-gnome-terminal-phonehome | 2
-rwxr-xr-x scripts/oe-pkgdata-browser | 5
-rwxr-xr-x scripts/oe-pkgdata-util | 2
-rwxr-xr-x scripts/oe-pylint | 2
-rwxr-xr-x scripts/oe-selftest | 2
-rwxr-xr-x scripts/oe-setup-build | 129
-rwxr-xr-x scripts/oe-setup-builddir | 102
-rwxr-xr-x scripts/oe-setup-layers | 146
-rwxr-xr-x scripts/oe-setup-vscode | 93
-rwxr-xr-x scripts/oe-time-dd-test.sh | 4
-rwxr-xr-x scripts/oe-trim-schemas | 2
-rwxr-xr-x scripts/oepydevshell-internal.py | 2
-rwxr-xr-x scripts/opkg-query-helper.py | 2
-rwxr-xr-x scripts/patchtest | 244
-rwxr-xr-x scripts/patchtest-get-branch | 81
-rwxr-xr-x scripts/patchtest-get-series | 115
-rwxr-xr-x scripts/patchtest-send-results | 110
-rwxr-xr-x scripts/patchtest-setup-sharedir | 83
-rw-r--r-- scripts/patchtest.README | 159
-rw-r--r-- scripts/postinst-intercepts/update_gtk_icon_cache | 6
-rw-r--r-- scripts/postinst-intercepts/update_mandb | 18
-rw-r--r-- scripts/postinst-intercepts/update_udev_hwdb | 5
-rwxr-xr-x scripts/pull-sdpx-licenses.py | 101
-rw-r--r-- scripts/pybootchartgui/pybootchartgui/draw.py | 199
-rw-r--r-- scripts/pybootchartgui/pybootchartgui/parsing.py | 63
-rw-r--r-- scripts/pybootchartgui/pybootchartgui/samples.py | 35
-rwxr-xr-x scripts/pythondeps | 2
-rwxr-xr-x scripts/relocate_sdk.py | 67
-rwxr-xr-x scripts/resulttool | 5
-rwxr-xr-x scripts/rpm2cpio.sh | 30
-rwxr-xr-x scripts/runqemu | 466
-rwxr-xr-x scripts/runqemu-addptable2image | 2
-rwxr-xr-x scripts/runqemu-export-rootfs | 25
-rwxr-xr-x scripts/runqemu-extract-sdk | 2
-rwxr-xr-x scripts/runqemu-gen-tapdevs | 120
-rwxr-xr-x scripts/runqemu-ifdown | 42
-rwxr-xr-x scripts/runqemu-ifup | 65
-rw-r--r-- scripts/runqemu.README | 16
-rwxr-xr-x scripts/send-error-report | 49
-rwxr-xr-x scripts/sstate-cache-management.py | 336
-rwxr-xr-x scripts/sstate-cache-management.sh | 458
-rwxr-xr-x scripts/sstate-diff-machines.sh | 2
-rwxr-xr-x scripts/sstate-sysroot-cruft.sh | 14
-rwxr-xr-x scripts/sysroot-relativelinks.py | 2
-rwxr-xr-x scripts/task-time | 2
-rwxr-xr-x scripts/test-remote-image | 9
-rwxr-xr-x scripts/verify-bashisms | 12
-rwxr-xr-x scripts/wic | 54
-rwxr-xr-x scripts/yocto-check-layer | 24
-rwxr-xr-x scripts/yocto_testresults_query.py | 131
180 files changed, 10735 insertions, 3443 deletions
diff --git a/scripts/.oe-layers.json b/scripts/.oe-layers.json
new file mode 100644
index 0000000000..1b00a84b54
--- /dev/null
+++ b/scripts/.oe-layers.json
@@ -0,0 +1,7 @@
1{
2 "layers": [
3 "../meta-poky",
4 "../meta"
5 ],
6 "version": "1.0"
7}
diff --git a/scripts/autobuilder-worker-prereq-tests b/scripts/autobuilder-worker-prereq-tests
index 82e9a77bd5..54fd3c1004 100755
--- a/scripts/autobuilder-worker-prereq-tests
+++ b/scripts/autobuilder-worker-prereq-tests
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# Script which can be run on new autobuilder workers to check all needed configuration is present. 5# Script which can be run on new autobuilder workers to check all needed configuration is present.
4# Designed to be run in a repo where bitbake/oe-core are already present. 6# Designed to be run in a repo where bitbake/oe-core are already present.
5# 7#
@@ -51,16 +53,31 @@ if (( $WATCHES < 65000 )); then
51 echo 'Need to increase watches (echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf)' 53
52 exit 1 54 exit 1
53fi 55fi
56OPEN_FILES=$(ulimit -n)
57if (( $OPEN_FILES < 65535 )); then
58 echo 'Increase maximum open files in /etc/security/limits.conf'
59 echo '* soft nofile 131072'
60 echo '* hard nofile 131072'
61 exit 1
62fi
63MAX_PROCESSES=$(ulimit -u)
64if (( $MAX_PROCESSES < 514542 )); then
65 echo 'Increase maximum user processes in /etc/security/limits.conf'
66 echo '* hard nproc 515294'
67 echo '* soft nproc 514543'
68 exit 1
69fi
70
54mkdir -p tmp/deploy/images/qemux86-64 71mkdir -p tmp/deploy/images/qemux86-64
55pushd tmp/deploy/images/qemux86-64 72pushd tmp/deploy/images/qemux86-64
56if [ ! -e core-image-minimal-qemux86-64.ext4 ]; then 73if [ ! -e core-image-minimal-qemux86-64.ext4 ]; then
57 wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4 74 wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4
58fi 75fi
59if [ ! -e core-image-minimal-qemux86-64.qemuboot.conf ]; then 76if [ ! -e core-image-minimal-qemux86-64.qemuboot.conf ]; then
60 wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf 77 wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf
61fi 78fi
62if [ ! -e bzImage-qemux86-64.bin ]; then 79if [ ! -e bzImage-qemux86-64.bin ]; then
63 wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/bzImage-qemux86-64.bin 80 wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/bzImage-qemux86-64.bin
64fi 81fi
65popd 82popd
66bitbake qemu-helper-native 83bitbake qemu-helper-native
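For reference, the host-side changes these new checks ask for boil down to the following (values taken from the error messages above; the limits.conf entries only apply at the next login). This is a sketch of the host setup, not autobuilder policy:

    $ echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf
    $ sudo sysctl -p
    $ printf '%s\n' '* soft nofile 131072' '* hard nofile 131072' \
          '* hard nproc 515294' '* soft nproc 514543' | sudo tee -a /etc/security/limits.conf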
diff --git a/scripts/b4-wrapper-poky.py b/scripts/b4-wrapper-poky.py
new file mode 100755
index 0000000000..f1170db06b
--- /dev/null
+++ b/scripts/b4-wrapper-poky.py
@@ -0,0 +1,185 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7# This script is to be called by b4:
8# - through the b4.prep-perpatch-check-cmd with "prep-perpatch-check-cmd" as
9# first argument,
10# - through b4.send-auto-cc-cmd with "send-auto-cc-cmd" as first argument,
11# - through b4.send-auto-to-cmd with "send-auto-to-cmd" as first argument,
12#
13# When prep-perpatch-check-cmd is passed:
14#
15# This checks that a patch makes changes to at most one project in the poky
16# combo repo (that is, out of yocto-docs, bitbake, openembedded-core combined
17# into poky and the poky-specific files).
18#
19# Printing something to stdout in this file will result in b4 prep --check fail
20# for the currently parsed patch.
21#
22# It checks that all patches in the series make changes to at most one project.
23#
24# When send-auto-cc-cmd is passed:
25#
26# This returns the list of Cc recipients for a patch.
27#
28# When send-auto-to-cmd is passed:
29#
30# This returns the list of To recipients for a patch.
31#
32# This script takes as stdin a patch.
33
34import pathlib
35import re
36import shutil
37import subprocess
38import sys
39
40cmd = sys.argv[1]
41
42patch = sys.stdin.readlines()
43
44# Subject field is used to identify the last patch as this script is called for
45# each patch. We edit the same file in a series by using the References field
46# unique identifier to check which projects are modified by earlier patches in
47# the series. To avoid cluttering the disk, the last patch in the list removes
48# that shared file.
49re_subject = re.compile(r'^Subject:.*\[.*PATCH.*\s(\d+)/\1\]')
50re_ref = re.compile(r'^References: <(.*)>$')
51
52subject = None
53ref = None
54
55if not shutil.which("lsdiff"):
56 print("lsdiff missing from host, please install patchutils", file=sys.stderr)
57 sys.exit(-1)
58
59try:
60 one_patch_series = False
61 for line in patch:
62 subject = re_subject.match(line)
63 if subject:
64 # Handle [PATCH 1/1]
65 if subject.group(1) == "1":
66 one_patch_series = True
67 break
68 if re.match(r'^Subject: .*\[.*PATCH[^/]*\]', line):
69 # Single patch is named [PATCH] but if there are prefix, it could be
70 # [PATCH prefix], so handle everything that doesn't have a /
71 # character which is used as separator between current patch number
72 # and total patch number
73 one_patch_series = True
74 break
75
76 if cmd == "prep-perpatch-check-cmd" and not one_patch_series:
77 for line in patch:
78 ref = re_ref.match(line)
79 if ref:
80 break
81
82 if not ref:
83 print("Failed to find ref to cover letter (References:)...", file=sys.stderr)
84 sys.exit(-2)
85
86 ref = ref.group(1)
87 series_check = pathlib.Path(f".tmp-{ref}")
88
89 patch = "".join(patch)
90
91 if cmd == "send-auto-cc-cmd":
92 # Patches to BitBake documentation should also go to yocto-docs mailing list
93 project_paths = {
94 "yocto-docs": ["bitbake/doc/*"],
95 }
96 else:
97 project_paths = {
98 "bitbake": ["bitbake/*"],
99 "yocto-docs": ["documentation/*"],
100 "poky": [
101 "meta-poky/*",
102 "meta-yocto-bsp/*",
103 "README.hardware.md",
104 "README.poky.md",
105 # scripts/b4-wrapper-poky.py is only run by b4 when in poky
106 # git repo. With that limitation, changes made to .b4-config
107 # can only be for poky's and not OE-Core's as only poky's is
108 # stored in poky git repo.
109 ".b4-config",
110 ],
111 }
112
113 # List of projects touched by this patch
114 projs = []
115
116 # Any file not matched by any path in project_paths means it is from
117 # OE-Core.
118 # When matching some path in project_paths, remove the matched files from
119 # that list.
120 files_left = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"],
121 input=patch, text=True)
122 files_left = set(files_left.splitlines())
123
124 for proj, proj_paths in project_paths.items():
125 lsdiff_args = [f"--include={path}" for path in proj_paths]
126 files = subprocess.check_output(["lsdiff", "--strip-match=1", "--strip=1"] + lsdiff_args,
127 input=patch, text=True)
128 if len(files):
129 files_left = files_left - set(files.splitlines())
130 projs.append(proj)
131 continue
132
133 # Handle patches made with --no-prefix
134 files = subprocess.check_output(["lsdiff"] + lsdiff_args,
135 input=patch, text=True)
136 if len(files):
137 files_left = files_left - set(files.splitlines())
138 projs.append(proj)
139
140 # Catch-all for everything not poky-specific or in bitbake/yocto-docs
141 if len(files_left) and cmd != "send-auto-cc-cmd":
142 projs.append("openembedded-core")
143
144 if cmd == "prep-perpatch-check-cmd":
145 if len(projs) > 1:
146 print(f"Diff spans more than one project ({', '.join(sorted(projs))}), split into multiple commits...",
147 file=sys.stderr)
148 sys.exit(-3)
149
150 # No need to check other patches in the series as there aren't any
151 if one_patch_series:
152 sys.exit(0)
153
154 # This should be replaced once b4 supports prep-perseries-check-cmd (or something similar)
155
156 if series_check.exists():
157 # NOT race-free if b4 decides to parallelize prep-perpatch-check-cmd
158 series_projs = series_check.read_text().split('\n')
159 else:
160 series_projs = []
161
162 series_projs += projs
163 uniq_series_projs = set(series_projs)
164 # NOT race-free, if b4 decides to parallelize prep-perpatch-check-cmd
165 series_check.write_text('\n'.join(uniq_series_projs))
166
167 if len(uniq_series_projs) > 1:
168 print(f"Series spans more than one project ({', '.join(sorted(uniq_series_projs))}), split into multiple series...",
169 file=sys.stderr)
170 sys.exit(-4)
171 else: # send-auto-cc-cmd / send-auto-to-cmd
172 ml_projs = {
173 "bitbake": "bitbake-devel@lists.openembedded.org",
174 "yocto-docs": "docs@lists.yoctoproject.org",
175 "poky": "poky@lists.yoctoproject.org",
176 "openembedded-core": "openembedded-core@lists.openembedded.org",
177 }
178
179 print("\n".join([ml_projs[ml] for ml in projs]))
180
181 sys.exit(0)
182finally:
183 # Last patch in the series, cleanup tmp file
184 if subject and ref and series_check.exists():
185 series_check.unlink()
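The three entry points named in the header comments are hooked up through b4's git-config keys. A sketch of that wiring (poky's actual .b4-config is not part of this diff, so treat the paths as illustrative):

    $ git config b4.prep-perpatch-check-cmd "./scripts/b4-wrapper-poky.py prep-perpatch-check-cmd"
    $ git config b4.send-auto-to-cmd "./scripts/b4-wrapper-poky.py send-auto-to-cmd"
    $ git config b4.send-auto-cc-cmd "./scripts/b4-wrapper-poky.py send-auto-cc-cmd"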
diff --git a/scripts/bblock b/scripts/bblock
new file mode 100755
index 0000000000..0082059af8
--- /dev/null
+++ b/scripts/bblock
@@ -0,0 +1,184 @@
1#!/usr/bin/env python3
2# bblock
3# lock/unlock task to latest signature
4#
5# Copyright (c) 2023 BayLibre, SAS
6# Author: Julien Stephan <jstephan@baylibre.com>
7#
8# SPDX-License-Identifier: GPL-2.0-only
9#
10
11import os
12import sys
13import logging
14
15scripts_path = os.path.dirname(os.path.realpath(__file__))
16lib_path = scripts_path + "/lib"
17sys.path = sys.path + [lib_path]
18
19import scriptpath
20
21scriptpath.add_bitbake_lib_path()
22
23import bb.tinfoil
24import bb.msg
25
26import argparse_oe
27
28myname = os.path.basename(sys.argv[0])
29logger = bb.msg.logger_create(myname)
30
31
32def getTaskSignatures(tinfoil, pn, tasks):
33 tinfoil.set_event_mask(
34 [
35 "bb.event.GetTaskSignatureResult",
36 "logging.LogRecord",
37 "bb.command.CommandCompleted",
38 "bb.command.CommandFailed",
39 ]
40 )
41 ret = tinfoil.run_command("getTaskSignatures", pn, tasks)
42 if ret:
43 while True:
44 event = tinfoil.wait_event(1)
45 if event:
46 if isinstance(event, bb.command.CommandCompleted):
47 break
48 elif isinstance(event, bb.command.CommandFailed):
49 logger.error(str(event))
50 sys.exit(2)
51 elif isinstance(event, bb.event.GetTaskSignatureResult):
52 sig = event.sig
53 elif isinstance(event, logging.LogRecord):
54 logger.handle(event)
55 else:
56 logger.error("No result returned from getTaskSignatures command")
57 sys.exit(2)
58 return sig
59
60
61def parseRecipe(tinfoil, recipe):
62 try:
63 tinfoil.parse_recipes()
64 d = tinfoil.parse_recipe(recipe)
65 except Exception:
66 logger.error("Failed to get recipe info for: %s" % recipe)
67 sys.exit(1)
68 return d
69
70
71def bblockDump(lockfile):
72 try:
73 with open(lockfile, "r") as lockfile:
74 for line in lockfile:
75 print(line.strip())
76 except IOError:
77 return 1
78 return 0
79
80
81def bblockReset(lockfile, pns, package_archs, tasks):
82 if not pns:
83 logger.info("Unlocking all recipes")
84 try:
85 os.remove(lockfile)
86 except FileNotFoundError:
87 pass
88 else:
89 logger.info("Unlocking {pns}".format(pns=pns))
90 tmp_lockfile = lockfile + ".tmp"
91 with open(lockfile, "r") as infile, open(tmp_lockfile, "w") as outfile:
92 for line in infile:
93 if not (
94 any(element in line for element in pns)
95 and any(element in line for element in package_archs.split())
96 ):
97 outfile.write(line)
98 else:
99 if tasks and not any(element in line for element in tasks):
100 outfile.write(line)
101 os.remove(lockfile)
102 os.rename(tmp_lockfile, lockfile)
103
104
105def main():
106 parser = argparse_oe.ArgumentParser(description="Lock and unlock a recipe")
107 parser.add_argument("pn", nargs="*", help="Space separated list of recipe to lock")
108 parser.add_argument(
109 "-t",
110 "--tasks",
111 help="Comma separated list of tasks",
112 type=lambda s: [
113 task if task.startswith("do_") else "do_" + task for task in s.split(",")
114 ],
115 )
116 parser.add_argument(
117 "-r",
118 "--reset",
119 action="store_true",
120 help="Unlock pn recipes, or all recipes if pn is empty",
121 )
122 parser.add_argument(
123 "-d",
124 "--dump",
125 action="store_true",
126 help="Dump generated bblock.conf file",
127 )
128
129 global_args, unparsed_args = parser.parse_known_args()
130
131 with bb.tinfoil.Tinfoil() as tinfoil:
132 tinfoil.prepare(config_only=True)
133
134 package_archs = tinfoil.config_data.getVar("PACKAGE_ARCHS")
135 builddir = tinfoil.config_data.getVar("TOPDIR")
136 lockfile = "{builddir}/conf/bblock.conf".format(builddir=builddir)
137
138 if global_args.dump:
139 bblockDump(lockfile)
140 return 0
141
142 if global_args.reset:
143 bblockReset(lockfile, global_args.pn, package_archs, global_args.tasks)
144 return 0
145
146 with open(lockfile, "a") as lockfile:
147 s = ""
148 if lockfile.tell() == 0:
149 s = "# Generated by bblock\n"
150 s += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "info"\n'
151 s += 'SIGGEN_LOCKEDSIGS_TYPES += "${PACKAGE_ARCHS}"\n'
152 s += "\n"
153
154 for pn in global_args.pn:
155 d = parseRecipe(tinfoil, pn)
156 package_arch = d.getVar("PACKAGE_ARCH")
157 siggen_locked_sigs_package_arch = d.getVar(
158 "SIGGEN_LOCKEDSIGS_{package_arch}".format(package_arch=package_arch)
159 )
160 sigs = getTaskSignatures(tinfoil, [pn], global_args.tasks)
161 for sig in sigs:
162 new_entry = "{pn}:{taskname}:{sig}".format(
163 pn=sig[0], taskname=sig[1], sig=sig[2]
164 )
165 if (
166 siggen_locked_sigs_package_arch
167 and not new_entry in siggen_locked_sigs_package_arch
168 ) or not siggen_locked_sigs_package_arch:
169 s += 'SIGGEN_LOCKEDSIGS_{package_arch} += "{new_entry}"\n'.format(
170 package_arch=package_arch, new_entry=new_entry
171 )
172 lockfile.write(s)
173 return 0
174
175
176if __name__ == "__main__":
177 try:
178 ret = main()
179 except Exception:
180 ret = 1
181 import traceback
182
183 traceback.print_exc()
184 sys.exit(ret)
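A usage sketch based on the argument parser above, run from an initialized build directory (recipe and task names are examples):

    $ bblock curl                     # lock all task signatures for curl
    $ bblock -t compile,install curl  # lock only do_compile and do_install
    $ bblock -d                       # dump the generated conf/bblock.conf
    $ bblock -r curl                  # unlock curl; a bare "bblock -r" unlocks everything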
diff --git a/scripts/bitbake-prserv-tool b/scripts/bitbake-prserv-tool
index e55d98c72e..0559c4c38a 100755
--- a/scripts/bitbake-prserv-tool
+++ b/scripts/bitbake-prserv-tool
@@ -1,5 +1,7 @@
1#!/usr/bin/env bash 1#!/usr/bin/env bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
@@ -15,8 +17,11 @@ help ()
15clean_cache() 17clean_cache()
16{ 18{
17 s=`bitbake -e | grep ^CACHE= | cut -f2 -d\"` 19 s=`bitbake -e | grep ^CACHE= | cut -f2 -d\"`
20 # Stop any active memory resident server
21 bitbake -m
22 # Remove cache entries since we want to trigger a full reparse
18 if [ "x${s}" != "x" ]; then 23 if [ "x${s}" != "x" ]; then
19 rm -rf ${s} 24 rm -f ${s}/bb_cache*.dat.*
20 fi 25 fi
21} 26}
22 27
@@ -50,43 +55,6 @@ do_import ()
50 return $ret 55 return $ret
51} 56}
52 57
53do_migrate_localcount ()
54{
55 df=`bitbake -R conf/migrate_localcount.conf -e | \
56 grep ^LOCALCOUNT_DUMPFILE= | cut -f2 -d\"`
57 if [ "x${df}" == "x" ];
58 then
59 echo "LOCALCOUNT_DUMPFILE is not defined!"
60 return 1
61 fi
62
63 rm -rf $df
64 clean_cache
65 echo "Exporting LOCALCOUNT to AUTOINCs..."
66 bitbake -R conf/migrate_localcount.conf -p
67 [ ! $? -eq 0 ] && echo "Exporting to file $df failed!" && exit 1
68
69 if [ -e $df ];
70 then
71 echo "Exporting to file $df succeeded!"
72 else
73 echo "Exporting to file $df failed!"
74 exit 1
75 fi
76
77 echo "Importing generated AUTOINC entries..."
78 [ -e $df ] && do_import $df
79
80 if [ ! $? -eq 0 ]
81 then
82 echo "Migration from LOCALCOUNT to AUTOINCs failed!"
83 return 1
84 fi
85
86 echo "Migration from LOCALCOUNT to AUTOINCs succeeded!"
87 return 0
88}
89
90[ $# -eq 0 ] && help && exit 1 58[ $# -eq 0 ] && help && exit 1
91 59
92case $2 in 60case $2 in
@@ -105,9 +73,6 @@ export)
105import) 73import)
106 do_import $2 74 do_import $2
107 ;; 75 ;;
108migrate_localcount)
109 do_migrate_localcount
110 ;;
111*) 76*)
112 help 77 help
113 exit 1 78 exit 1
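With migrate_localcount removed, export and import are the remaining commands; a usage sketch (the filename is an example; an earlier check in the script requires a .conf or .inc extension):

    $ bitbake-prserv-tool export my-prs.conf   # dump PR service data for the current build
    $ bitbake-prserv-tool import my-prs.conf   # load exported PR data into the PR service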
diff --git a/scripts/bitbake-whatchanged b/scripts/bitbake-whatchanged
deleted file mode 100755
index 6f4b268119..0000000000
--- a/scripts/bitbake-whatchanged
+++ /dev/null
@@ -1,320 +0,0 @@
1#!/usr/bin/env python3
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4
5# Copyright (c) 2013 Wind River Systems, Inc.
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import os
11import sys
12import getopt
13import shutil
14import re
15import warnings
16import subprocess
17import argparse
18
19scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
20lib_path = scripts_path + '/lib'
21sys.path = sys.path + [lib_path]
22
23import scriptpath
24
25# Figure out where is the bitbake/lib/bb since we need bb.siggen and bb.process
26bitbakepath = scriptpath.add_bitbake_lib_path()
27if not bitbakepath:
28 sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
29 sys.exit(1)
30scriptpath.add_oe_lib_path()
31import argparse_oe
32
33import bb.siggen
34import bb.process
35
36# Match the stamp's filename
37# group(1): PE_PV (may no PE)
38# group(2): PR
39# group(3): TASK
40# group(4): HASH
41stamp_re = re.compile("(?P<pv>.*)-(?P<pr>r\d+)\.(?P<task>do_\w+)\.(?P<hash>[^\.]*)")
42sigdata_re = re.compile(".*\.sigdata\..*")
43
44def gen_dict(stamps):
45 """
46 Generate the dict from the stamps dir.
47 The output dict format is:
48 {fake_f: {pn: PN, pv: PV, pr: PR, task: TASK, path: PATH}}
49 Where:
50 fake_f: pv + task + hash
51 path: the path to the stamp file
52 """
53 # The member of the sub dict (A "path" will be appended below)
54 sub_mem = ("pv", "pr", "task")
55 d = {}
56 for dirpath, _, files in os.walk(stamps):
57 for f in files:
58 # The "bitbake -S" would generate ".sigdata", but no "_setscene".
59 fake_f = re.sub('_setscene.', '.', f)
60 fake_f = re.sub('.sigdata', '', fake_f)
61 subdict = {}
62 tmp = stamp_re.match(fake_f)
63 if tmp:
64 for i in sub_mem:
65 subdict[i] = tmp.group(i)
66 if len(subdict) != 0:
67 pn = os.path.basename(dirpath)
68 subdict['pn'] = pn
69 # The path will be used by os.stat() and bb.siggen
70 subdict['path'] = dirpath + "/" + f
71 fake_f = tmp.group('pv') + tmp.group('task') + tmp.group('hash')
72 d[fake_f] = subdict
73 return d
74
75# Re-construct the dict
76def recon_dict(dict_in):
77 """
78 The output dict format is:
79 {pn_task: {pv: PV, pr: PR, path: PATH}}
80 """
81 dict_out = {}
82 for k in dict_in.keys():
83 subdict = {}
84 # The key
85 pn_task = "%s_%s" % (dict_in.get(k).get('pn'), dict_in.get(k).get('task'))
86 # If more than one stamps are found, use the latest one.
87 if pn_task in dict_out:
88 full_path_pre = dict_out.get(pn_task).get('path')
89 full_path_cur = dict_in.get(k).get('path')
90 if os.stat(full_path_pre).st_mtime > os.stat(full_path_cur).st_mtime:
91 continue
92 subdict['pv'] = dict_in.get(k).get('pv')
93 subdict['pr'] = dict_in.get(k).get('pr')
94 subdict['path'] = dict_in.get(k).get('path')
95 dict_out[pn_task] = subdict
96
97 return dict_out
98
99def split_pntask(s):
100 """
101 Split the pn_task in to (pn, task) and return it
102 """
103 tmp = re.match("(.*)_(do_.*)", s)
104 return (tmp.group(1), tmp.group(2))
105
106
107def print_added(d_new = None, d_old = None):
108 """
109 Print the newly added tasks
110 """
111 added = {}
112 for k in list(d_new.keys()):
113 if k not in d_old:
114 # Add the new one to added dict, and remove it from
115 # d_new, so the remaining ones are the changed ones
116 added[k] = d_new.get(k)
117 del(d_new[k])
118
119 if not added:
120 return 0
121
122 # Format the output, the dict format is:
123 # {pn: task1, task2 ...}
124 added_format = {}
125 counter = 0
126 for k in added.keys():
127 pn, task = split_pntask(k)
128 if pn in added_format:
129 # Append the value
130 added_format[pn] = "%s %s" % (added_format.get(pn), task)
131 else:
132 added_format[pn] = task
133 counter += 1
134 print("=== Newly added tasks: (%s tasks)" % counter)
135 for k in added_format.keys():
136 print(" %s: %s" % (k, added_format.get(k)))
137
138 return counter
139
140def print_vrchanged(d_new = None, d_old = None, vr = None):
141 """
142 Print the pv or pr changed tasks.
143 The arg "vr" is "pv" or "pr"
144 """
145 pvchanged = {}
146 counter = 0
147 for k in list(d_new.keys()):
148 if d_new.get(k).get(vr) != d_old.get(k).get(vr):
149 counter += 1
150 pn, task = split_pntask(k)
151 if pn not in pvchanged:
152 # Format the output, we only print pn (no task) since
153 # all the tasks would be changed when pn or pr changed,
154 # the dict format is:
155 # {pn: pv/pr_old -> pv/pr_new}
156 pvchanged[pn] = "%s -> %s" % (d_old.get(k).get(vr), d_new.get(k).get(vr))
157 del(d_new[k])
158
159 if not pvchanged:
160 return 0
161
162 print("\n=== %s changed: (%s tasks)" % (vr.upper(), counter))
163 for k in pvchanged.keys():
164 print(" %s: %s" % (k, pvchanged.get(k)))
165
166 return counter
167
168def print_depchanged(d_new = None, d_old = None, verbose = False):
169 """
170 Print the dependency changes
171 """
172 depchanged = {}
173 counter = 0
174 for k in d_new.keys():
175 counter += 1
176 pn, task = split_pntask(k)
177 if (verbose):
178 full_path_old = d_old.get(k).get("path")
179 full_path_new = d_new.get(k).get("path")
180 # No counter since it is not ready here
181 if sigdata_re.match(full_path_old) and sigdata_re.match(full_path_new):
182 output = bb.siggen.compare_sigfiles(full_path_old, full_path_new)
183 if output:
184 print("\n=== The verbose changes of %s.%s:" % (pn, task))
185 print('\n'.join(output))
186 else:
187 # Format the output, the format is:
188 # {pn: task1, task2, ...}
189 if pn in depchanged:
190 depchanged[pn] = "%s %s" % (depchanged.get(pn), task)
191 else:
192 depchanged[pn] = task
193
194 if len(depchanged) > 0:
195 print("\n=== Dependencies changed: (%s tasks)" % counter)
196 for k in depchanged.keys():
197 print(" %s: %s" % (k, depchanged[k]))
198
199 return counter
200
201
202def main():
203 """
204 Print what will be done between the current and last builds:
205 1) Run "STAMPS_DIR=<path> bitbake -S recipe" to re-generate the stamps
206 2) Figure out what are newly added and changed, can't figure out
207 what are removed since we can't know the previous stamps
208 clearly, for example, if there are several builds, we can't know
209 which stamps the last build has used exactly.
210 3) Use bb.siggen.compare_sigfiles to diff the old and new stamps
211 """
212
213 parser = argparse_oe.ArgumentParser(usage = """%(prog)s [options] [package ...]
214print what will be done between the current and last builds, for example:
215
216 $ bitbake core-image-sato
217 # Edit the recipes
218 $ bitbake-whatchanged core-image-sato
219
220The changes will be printed.
221
222Note:
223 The amount of tasks is not accurate when the task is "do_build" since
224 it usually depends on other tasks.
225 The "nostamp" task is not included.
226"""
227)
228 parser.add_argument("recipe", help="recipe to check")
229 parser.add_argument("-v", "--verbose", help = "print the verbose changes", action = "store_true")
230 args = parser.parse_args()
231
232 # Get the STAMPS_DIR
233 print("Figuring out the STAMPS_DIR ...")
234 cmdline = "bitbake -e | sed -ne 's/^STAMPS_DIR=\"\(.*\)\"/\\1/p'"
235 try:
236 stampsdir, err = bb.process.run(cmdline)
237 except:
238 raise
239 if not stampsdir:
240 print("ERROR: No STAMPS_DIR found for '%s'" % args.recipe, file=sys.stderr)
241 return 2
242 stampsdir = stampsdir.rstrip("\n")
243 if not os.path.isdir(stampsdir):
244 print("ERROR: stamps directory \"%s\" not found!" % stampsdir, file=sys.stderr)
245 return 2
246
247 # The new stamps dir
248 new_stampsdir = stampsdir + ".bbs"
249 if os.path.exists(new_stampsdir):
250 print("ERROR: %s already exists!" % new_stampsdir, file=sys.stderr)
251 return 2
252
253 try:
254 # Generate the new stamps dir
255 print("Generating the new stamps ... (need several minutes)")
256 cmdline = "STAMPS_DIR=%s bitbake -S none %s" % (new_stampsdir, args.recipe)
257 # FIXME
258 # The "bitbake -S" may fail, not fatal error, the stamps will still
259 # be generated, this might be a bug of "bitbake -S".
260 try:
261 bb.process.run(cmdline)
262 except Exception as exc:
263 print(exc)
264
265 # The dict for the new and old stamps.
266 old_dict = gen_dict(stampsdir)
267 new_dict = gen_dict(new_stampsdir)
268
269 # Remove the same one from both stamps.
270 cnt_unchanged = 0
271 for k in list(new_dict.keys()):
272 if k in old_dict:
273 cnt_unchanged += 1
274 del(new_dict[k])
275 del(old_dict[k])
276
277 # Re-construct the dict to easily find out what is added or changed.
278 # The dict format is:
279 # {pn_task: {pv: PV, pr: PR, path: PATH}}
280 new_recon = recon_dict(new_dict)
281 old_recon = recon_dict(old_dict)
282
283 del new_dict
284 del old_dict
285
286 # Figure out what are changed, the new_recon would be changed
287 # by the print_xxx function.
288 # Newly added
289 cnt_added = print_added(new_recon, old_recon)
290
291 # PV (including PE) and PR changed
292 # Let the bb.siggen handle them if verbose
293 cnt_rv = {}
294 if not args.verbose:
295 for i in ('pv', 'pr'):
296 cnt_rv[i] = print_vrchanged(new_recon, old_recon, i)
297
298 # Dependencies changed (use bitbake-diffsigs)
299 cnt_dep = print_depchanged(new_recon, old_recon, args.verbose)
300
301 total_changed = cnt_added + (cnt_rv.get('pv') or 0) + (cnt_rv.get('pr') or 0) + cnt_dep
302
303 print("\n=== Summary: (%s changed, %s unchanged)" % (total_changed, cnt_unchanged))
304 if args.verbose:
305 print("Newly added: %s\nDependencies changed: %s\n" % \
306 (cnt_added, cnt_dep))
307 else:
308 print("Newly added: %s\nPV changed: %s\nPR changed: %s\nDependencies changed: %s\n" % \
309 (cnt_added, cnt_rv.get('pv') or 0, cnt_rv.get('pr') or 0, cnt_dep))
310 except:
311 print("ERROR occurred!")
312 raise
313 finally:
314 # Remove the newly generated stamps dir
315 if os.path.exists(new_stampsdir):
316 print("Removing the newly generated stamps dir ...")
317 shutil.rmtree(new_stampsdir)
318
319if __name__ == "__main__":
320 sys.exit(main())
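The removed script predates bitbake's built-in signature diffing; a rough present-day equivalent of its "what changed since the last build" report (assuming a configured build environment; recipe and task names are placeholders):

    $ bitbake -S printdiff core-image-sato   # explain signature differences against existing stamps/sstate
    $ bitbake-diffsigs -t <recipe> <task>    # diff the two most recent signature files for one task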
diff --git a/scripts/buildhistory-diff b/scripts/buildhistory-diff
index 3bd40a2a1e..a6e785aa23 100755
--- a/scripts/buildhistory-diff
+++ b/scripts/buildhistory-diff
@@ -11,7 +11,6 @@
11import sys 11import sys
12import os 12import os
13import argparse 13import argparse
14from distutils.version import LooseVersion
15 14
16# Ensure PythonGit is installed (buildhistory_analysis needs it) 15# Ensure PythonGit is installed (buildhistory_analysis needs it)
17try: 16try:
@@ -73,10 +72,6 @@ def main():
73 parser = get_args_parser() 72 parser = get_args_parser()
74 args = parser.parse_args() 73 args = parser.parse_args()
75 74
76 if LooseVersion(git.__version__) < '0.3.1':
77 sys.stderr.write("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script\n")
78 sys.exit(1)
79
80 if len(args.revisions) > 2: 75 if len(args.revisions) > 2:
81 sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:])) 76 sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:]))
82 parser.print_help() 77 parser.print_help()
diff --git a/scripts/buildstats-diff b/scripts/buildstats-diff
index 2f6498ab67..df1df432f1 100755
--- a/scripts/buildstats-diff
+++ b/scripts/buildstats-diff
@@ -1,4 +1,4 @@
1#!/usr/bin/python3 1#!/usr/bin/env python3
2# 2#
3# Script for comparing buildstats from two different builds 3# Script for comparing buildstats from two different builds
4# 4#
@@ -12,6 +12,7 @@ import glob
12import logging 12import logging
13import math 13import math
14import os 14import os
15import pathlib
15import sys 16import sys
16from operator import attrgetter 17from operator import attrgetter
17 18
@@ -251,11 +252,32 @@ Script for comparing buildstats of two separate builds."""
251 "average over them") 252 "average over them")
252 parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[], 253 parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[],
253 help="Only include TASK in report. May be specified multiple times") 254 help="Only include TASK in report. May be specified multiple times")
254 parser.add_argument('buildstats1', metavar='BUILDSTATS1', help="'Left' buildstat") 255 parser.add_argument('buildstats1', metavar='BUILDSTATS1', nargs="?", help="'Left' buildstat")
255 parser.add_argument('buildstats2', metavar='BUILDSTATS2', help="'Right' buildstat") 256 parser.add_argument('buildstats2', metavar='BUILDSTATS2', nargs="?", help="'Right' buildstat")
256 257
257 args = parser.parse_args(argv) 258 args = parser.parse_args(argv)
258 259
260 if args.buildstats1 and args.buildstats2:
261 # Both paths specified
262 pass
263 elif args.buildstats1 or args.buildstats2:
264 # Just one path specified, this is an error
265 parser.print_usage(sys.stderr)
266 print("Either specify two buildstats paths, or none to use the last two paths.", file=sys.stderr)
267 sys.exit(1)
268 else:
269 # No paths specified, try to find the last two buildstats
270 try:
271 buildstats_dir = pathlib.Path(os.environ["BUILDDIR"]) / "tmp" / "buildstats"
272 paths = sorted(buildstats_dir.iterdir())
273 args.buildstats2 = paths.pop()
274 args.buildstats1 = paths.pop()
275 print(f"Comparing {args.buildstats1} -> {args.buildstats2}\n")
276 except KeyError:
277 parser.print_usage(sys.stderr)
278 print("Build environment has not been configured, cannot find buildstats", file=sys.stderr)
279 sys.exit(1)
280
259 # We do not need/want to read all buildstats if we just want to look at the 281
260 # package versions 282 # package versions
261 if args.ver_diff: 283 if args.ver_diff:
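With both positional arguments now optional, the common case needs no paths at all; a usage sketch (the timestamped directory names are examples):

    $ . oe-init-build-env
    $ buildstats-diff                          # compares the two most recent entries under tmp/buildstats
    $ buildstats-diff tmp/buildstats/20240101083000 tmp/buildstats/20240102091500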
diff --git a/scripts/buildstats-summary b/scripts/buildstats-summary
new file mode 100755
index 0000000000..cc2a27722a
--- /dev/null
+++ b/scripts/buildstats-summary
@@ -0,0 +1,140 @@
1#!/usr/bin/env python3
2#
3# Dump a summary of the specified buildstats to the terminal, filtering and
4# sorting by walltime.
5#
6# SPDX-License-Identifier: GPL-2.0-only
7
8import argparse
9import dataclasses
10import datetime
11import enum
12import os
13import pathlib
14import sys
15
16scripts_path = os.path.dirname(os.path.realpath(__file__))
17sys.path.append(os.path.join(scripts_path, "lib"))
18import buildstats
19
20
21@dataclasses.dataclass
22class Task:
23 recipe: str
24 task: str
25 start: datetime.datetime
26 duration: datetime.timedelta
27
28
29class Sorting(enum.Enum):
30 start = 1
31 duration = 2
32
33 # argparse integration
34 def __str__(self) -> str:
35 return self.name
36
37 def __repr__(self) -> str:
38 return self.name
39
40 @staticmethod
41 def from_string(s: str):
42 try:
43 return Sorting[s]
44 except KeyError:
45 return s
46
47
48def read_buildstats(path: pathlib.Path) -> buildstats.BuildStats:
49 if not path.exists():
50 raise Exception(f"No such file or directory: {path}")
51 if path.is_file():
52 return buildstats.BuildStats.from_file_json(path)
53 if (path / "build_stats").is_file():
54 return buildstats.BuildStats.from_dir(path)
55 raise Exception(f"Cannot find buildstats in {path}")
56
57
58def dump_buildstats(args, bs: buildstats.BuildStats):
59 tasks = []
60 for recipe in bs.values():
61 for task, stats in recipe.tasks.items():
62 t = Task(
63 recipe.name,
64 task,
65 datetime.datetime.fromtimestamp(stats["start_time"]),
66 datetime.timedelta(seconds=int(stats.walltime)),
67 )
68 tasks.append(t)
69
70 tasks.sort(key=lambda t: getattr(t, args.sort.name))
71
72 minimum = datetime.timedelta(seconds=args.shortest)
73 highlight = datetime.timedelta(seconds=args.highlight)
74
75 for t in tasks:
76 if t.duration >= minimum:
77 line = f"{t.duration} {t.recipe}:{t.task}"
78 if args.highlight and t.duration >= highlight:
79 print(f"\033[1m{line}\033[0m")
80 else:
81 print(line)
82
83
84def main(argv=None) -> int:
85 parser = argparse.ArgumentParser(
86 formatter_class=argparse.ArgumentDefaultsHelpFormatter
87 )
88
89 parser.add_argument(
90 "buildstats",
91 metavar="BUILDSTATS",
92 nargs="?",
93 type=pathlib.Path,
94 help="Buildstats file, or latest if not specified",
95 )
96 parser.add_argument(
97 "--sort",
98 "-s",
99 type=Sorting.from_string,
100 choices=list(Sorting),
101 default=Sorting.start,
102 help="Sort tasks",
103 )
104 parser.add_argument(
105 "--shortest",
106 "-t",
107 type=int,
108 default=1,
109 metavar="SECS",
110 help="Hide tasks shorter than SECS seconds",
111 )
112 parser.add_argument(
113 "--highlight",
114 "-g",
115 type=int,
116 default=60,
117 metavar="SECS",
118 help="Highlight tasks longer than SECS seconds (0 disabled)",
119 )
120
121 args = parser.parse_args(argv)
122
123 # If a buildstats file wasn't specified, try to find the last one
124 if not args.buildstats:
125 try:
126 builddir = pathlib.Path(os.environ["BUILDDIR"])
127 buildstats_dir = builddir / "tmp" / "buildstats"
128 args.buildstats = sorted(buildstats_dir.iterdir())[-1]
129 except KeyError:
130 print("Build environment has not been configured, cannot find buildstats")
131 return 1
132
133 bs = read_buildstats(args.buildstats)
134 dump_buildstats(args, bs)
135
136 return 0
137
138
139if __name__ == "__main__":
140 sys.exit(main())
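A usage sketch based on the options defined above (run from an initialized build environment so BUILDDIR is set):

    $ buildstats-summary                        # latest buildstats, tasks in start-time order
    $ buildstats-summary --sort duration -t 10  # only tasks taking 10s or more, slowest last
    $ buildstats-summary -g 0                   # disable highlighting of long-running tasks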
diff --git a/scripts/clean-hashserver-database b/scripts/clean-hashserver-database
new file mode 100755
index 0000000000..9fa162c981
--- /dev/null
+++ b/scripts/clean-hashserver-database
@@ -0,0 +1,77 @@
1#!/bin/bash
2set -euo pipefail
3
4SSTATE_DIR=""
5BB_HASHCLIENT=""
6BB_HASHSERVER=""
7
8ALIVE_DB_MARK="alive"
9CLEAN_DB="false"
10THRESHOLD_AGE="3600"
11
12function help() {
13 cat <<HELP_TEXT
14Usage: $0 --sstate-dir path --hashclient path --hashserver-address address \
15[--mark value] [--clean-db] [--threshold-age seconds]
16
17Auxiliary script to remove unused or no longer relevant entries from the hashequivalence database, based
18on the files available in the sstate directory.
19
20 -h | --help) Show this help message and exit
21 -a | --hashserver-address) bitbake-hashserver address
22 -c | --hashclient) Path to bitbake-hashclient
23 -m | --mark) Marker string to mark database entries
24 -s | --sstate-dir) Path to the sstate dir
25 -t | --threshold-age) Remove unused entries older than SECONDS seconds (default: 3600)
26 --clean-db) Remove all unmarked and unused entries from the database
27HELP_TEXT
28}
29
30function argument_parser() {
31 while [ $# -gt 0 ]; do
32 case "$1" in
33 -h | --help) help; exit 0 ;;
34 -a | --hashserver-address) BB_HASHSERVER="$2"; shift ;;
35 -c | --hashclient) BB_HASHCLIENT="$2"; shift ;;
36 -m | --mark) ALIVE_DB_MARK="$2"; shift ;;
37 -s | --sstate-dir) SSTATE_DIR="$2"; shift ;;
38 -t | --threshold-age) THRESHOLD_AGE="$2"; shift ;;
39 --clean-db) CLEAN_DB="true";;
40 *)
41 echo "Argument '$1' is not supported" >&2
42 help >&2
43 exit 1
44 ;;
45 esac
46 shift
47 done
48
49 function validate_mandatory_argument() {
50 local var_value="$1"
51 local error_message="$2"
52
53 if [ -z "$var_value" ]; then
54 echo "$error_message"
55 help >&2
56 exit 1
57 fi
58 }
59
60 validate_mandatory_argument "$SSTATE_DIR" "Please provide the path to the sstate dir."
61 validate_mandatory_argument "$BB_HASHCLIENT" "Please provide the path to bitbake-hashclient."
62 validate_mandatory_argument "$BB_HASHSERVER" "Please provide the address of bitbake-hashserver."
63}
64
65# -- main code --
66argument_parser "$@"
67
68# Mark all db sstate hashes
69find "$SSTATE_DIR" -name "*.tar.zst" | \
70sed 's/.*:\([^_]*\)_.*/unihash \1/' | \
71$BB_HASHCLIENT --address "$BB_HASHSERVER" gc-mark-stream "$ALIVE_DB_MARK"
72
73# Remove unmarked and unused entries
74if [ "$CLEAN_DB" = "true" ]; then
75 $BB_HASHCLIENT --address "$BB_HASHSERVER" gc-sweep "$ALIVE_DB_MARK"
76 $BB_HASHCLIENT --address "$BB_HASHSERVER" clean-unused "$THRESHOLD_AGE"
77fi
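A usage sketch (server address and paths are examples). Marking live hashes happens on every run; --clean-db additionally sweeps unmarked entries and drops entries unused for longer than --threshold-age seconds:

    $ clean-hashserver-database --sstate-dir /srv/sstate \
          --hashclient bitbake/bin/bitbake-hashclient \
          --hashserver-address hashserv.example.com:8686 --clean-db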
diff --git a/scripts/combo-layer b/scripts/combo-layer
index 045de65642..4a715914af 100755
--- a/scripts/combo-layer
+++ b/scripts/combo-layer
@@ -19,9 +19,8 @@ import tempfile
19import configparser 19import configparser
20import re 20import re
21import copy 21import copy
22import pipes 22import shlex
23import shutil 23import shutil
24from collections import OrderedDict
25from string import Template 24from string import Template
26from functools import reduce 25from functools import reduce
27 26
@@ -192,6 +191,23 @@ def runcmd(cmd,destdir=None,printerr=True,out=None,env=None):
192 logger.debug("output: %s" % output.replace(chr(0), '\\0')) 191 logger.debug("output: %s" % output.replace(chr(0), '\\0'))
193 return output 192 return output
194 193
194def action_sync_revs(conf, args):
195 """
196 Update the last_revision config option for each repo with the latest
197 revision in the remote's branch. Useful if multiple people are using
198 combo-layer.
199 """
200 repos = get_repos(conf, args[1:])
201
202 for name in repos:
203 repo = conf.repos[name]
204 ldir = repo['local_repo_dir']
205 branch = repo.get('branch', "master")
206 runcmd("git fetch", ldir)
207 lastrev = runcmd('git rev-parse origin/%s' % branch, ldir).strip()
208 print("Updating %s to %s" % (name, lastrev))
209 conf.update(name, "last_revision", lastrev)
210
195def action_init(conf, args): 211def action_init(conf, args):
196 """ 212 """
197 Clone component repositories 213 Clone component repositories
@@ -467,7 +483,7 @@ def check_repo_clean(repodir):
467 exit if repo is dirty 483 exit if repo is dirty
468 """ 484 """
469 output=runcmd("git status --porcelain", repodir) 485 output=runcmd("git status --porcelain", repodir)
470 r = re.compile('\?\? patch-.*/') 486 r = re.compile(r'\?\? patch-.*/')
471 dirtyout = [item for item in output.splitlines() if not r.match(item)] 487 dirtyout = [item for item in output.splitlines() if not r.match(item)]
472 if dirtyout: 488 if dirtyout:
473 logger.error("git repo %s is dirty, please fix it first", repodir) 489 logger.error("git repo %s is dirty, please fix it first", repodir)
@@ -508,7 +524,7 @@ def check_patch(patchfile):
508 f.close() 524 f.close()
509 if of: 525 if of:
510 of.close() 526 of.close()
511 bb.utils.rename(patchfile + '.tmp', patchfile) 527 os.rename(of.name, patchfile)
512 528
513def drop_to_shell(workdir=None): 529def drop_to_shell(workdir=None):
514 if not sys.stdin.isatty(): 530 if not sys.stdin.isatty():
@@ -1259,7 +1275,7 @@ def apply_commit(parent, rev, largs, wargs, dest_dir, file_filter=None):
1259 target = os.path.join(wargs["destdir"], dest_dir) 1275 target = os.path.join(wargs["destdir"], dest_dir)
1260 if not os.path.isdir(target): 1276 if not os.path.isdir(target):
1261 os.makedirs(target) 1277 os.makedirs(target)
1262 quoted_target = pipes.quote(target) 1278 quoted_target = shlex.quote(target)
1263 # os.sysconf('SC_ARG_MAX') is lying: running a command with 1279 # os.sysconf('SC_ARG_MAX') is lying: running a command with
1264 # string length 629343 already failed with "Argument list too 1280 # string length 629343 already failed with "Argument list too
1265 # long" although SC_ARG_MAX = 2097152. "man execve" explains 1281 # long" although SC_ARG_MAX = 2097152. "man execve" explains
@@ -1271,7 +1287,7 @@ def apply_commit(parent, rev, largs, wargs, dest_dir, file_filter=None):
1271 unquoted_args = [] 1287 unquoted_args = []
1272 cmdsize = 100 + len(quoted_target) 1288 cmdsize = 100 + len(quoted_target)
1273 while update: 1289 while update:
1274 quoted_next = pipes.quote(update[0]) 1290 quoted_next = shlex.quote(update[0])
1275 size_next = len(quoted_next) + len(dest_dir) + 1 1291 size_next = len(quoted_next) + len(dest_dir) + 1
1276 logger.debug('cmdline length %d + %d < %d?' % (cmdsize, size_next, os.sysconf('SC_ARG_MAX'))) 1292 logger.debug('cmdline length %d + %d < %d?' % (cmdsize, size_next, os.sysconf('SC_ARG_MAX')))
1277 if cmdsize + size_next < max_cmdsize: 1293 if cmdsize + size_next < max_cmdsize:
@@ -1302,6 +1318,7 @@ actions = {
1302 "update": action_update, 1318 "update": action_update,
1303 "pull": action_pull, 1319 "pull": action_pull,
1304 "splitpatch": action_splitpatch, 1320 "splitpatch": action_splitpatch,
1321 "sync-revs": action_sync_revs,
1305} 1322}
1306 1323
1307def main(): 1324def main():
@@ -1312,10 +1329,11 @@ def main():
1312Create and update a combination layer repository from multiple component repositories. 1329Create and update a combination layer repository from multiple component repositories.
1313 1330
1314Action: 1331Action:
1315 init initialise the combo layer repo 1332 init initialise the combo layer repo
1316 update [components] get patches from component repos and apply them to the combo repo 1333 update [components] get patches from component repos and apply them to the combo repo
1317 pull [components] just pull component repos only 1334 pull [components] just pull component repos only
1318 splitpatch [commit] generate commit patch and split per component, default commit is HEAD""") 1335 sync-revs [components] update the config file's last_revision for each repository
1336 splitpatch [commit] generate commit patch and split per component, default commit is HEAD""")
1319 1337
1320 parser.add_option("-c", "--conf", help = "specify the config file (conf/combo-layer.conf is the default).", 1338 parser.add_option("-c", "--conf", help = "specify the config file (conf/combo-layer.conf is the default).",
1321 action = "store", dest = "conffile", default = "conf/combo-layer.conf") 1339 action = "store", dest = "conffile", default = "conf/combo-layer.conf")
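A usage sketch for the new sync-revs action (component names come from the combo-layer config file):

    $ combo-layer sync-revs            # update last_revision for every configured component
    $ combo-layer sync-revs bitbake    # or only for the named components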
diff --git a/scripts/combo-layer-hook-default.sh b/scripts/combo-layer-hook-default.sh
index 11547a9826..fb9651b31f 100755
--- a/scripts/combo-layer-hook-default.sh
+++ b/scripts/combo-layer-hook-default.sh
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Hook to add source component/revision info to commit message 7# Hook to add source component/revision info to commit message
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
index e7bd129e9e..6672189c95 100755
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh
@@ -16,8 +16,8 @@
16 16
17# Setup the defaults 17# Setup the defaults
18DATFILE="bb-matrix.dat" 18DATFILE="bb-matrix.dat"
19XLABEL="BB_NUMBER_THREADS" 19XLABEL="BB\\\\_NUMBER\\\\_THREADS"
20YLABEL="PARALLEL_MAKE" 20YLABEL="PARALLEL\\\\_MAKE"
21FIELD=3 21FIELD=3
22DEF_TITLE="Elapsed Time (seconds)" 22DEF_TITLE="Elapsed Time (seconds)"
23PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100" 23PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
index 090133600b..a9cdf082ab 100755
--- a/scripts/contrib/bbvars.py
+++ b/scripts/contrib/bbvars.py
@@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars):
36def collect_documented_vars(docfiles): 36def collect_documented_vars(docfiles):
37 ''' Walk the docfiles and collect the documented variables ''' 37 ''' Walk the docfiles and collect the documented variables '''
38 documented_vars = [] 38 documented_vars = []
39 prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-") 39 prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-")
40 var_prog = re.compile('<glossentry id=\'var-(.*)\'>') 40 var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
41 for d in docfiles: 41 for d in docfiles:
42 with open(d) as f: 42 with open(d) as f:
43 documented_vars += var_prog.findall(f.read()) 43 documented_vars += var_prog.findall(f.read())
@@ -45,7 +45,7 @@ def collect_documented_vars(docfiles):
45 return documented_vars 45 return documented_vars
46 46
47def bbvar_doctag(var, docconf): 47def bbvar_doctag(var, docconf):
48 prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var)) 48 prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
49 if docconf == "": 49 if docconf == "":
50 return "?" 50 return "?"
51 51
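The raw-string conversions above change no matching behaviour; they silence Python's invalid-escape warnings. A quick way to see the warning these fixes avoid (output as printed by Python 3.12; older releases emit a DeprecationWarning instead):

    $ python3 -c 'import re; re.compile("^%s\[doc\]")'
    <string>:1: SyntaxWarning: invalid escape sequence '\['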
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
index fa71d4a2e9..0a85e6e708 100755
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then
87 exit 1 87 exit 1
88fi 88fi
89 89
90if [ -n "$email_to" ]; then
91 if ! [ -x "$(command -v phantomjs)" ]; then
92 echo "ERROR: Sending email needs phantomjs."
93 exit 1
94 fi
95 if ! [ -x "$(command -v optipng)" ]; then
96 echo "ERROR: Sending email needs optipng."
97 exit 1
98 fi
99fi
100
101# Open a file descriptor for flock and acquire lock 90# Open a file descriptor for flock and acquire lock
102LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" 91LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
103if ! exec 3> "$LOCK_FILE"; then 92if ! exec 3> "$LOCK_FILE"; then
104 echo "ERROR: Unable to open lock file" 93 echo "ERROR: Unable to open loemack file"
105 exit 1 94 exit 1
106fi 95fi
107if ! flock -n 3; then 96if ! flock -n 3; then
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then
226 if [ -n "$email_to" ]; then 215 if [ -n "$email_to" ]; then
227 echo "Emailing test report" 216 echo "Emailing test report"
228 os_name=`get_os_release_var PRETTY_NAME` 217 os_name=`get_os_release_var PRETTY_NAME`
229 "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" 218 "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
230 fi 219 fi
231 220
232 # Upload report files, unless we're on detached head 221 # Upload report files, unless we're on detached head
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
index 4d41a4c475..c69acb4095 100755
--- a/scripts/contrib/convert-overrides.py
+++ b/scripts/contrib/convert-overrides.py
@@ -22,66 +22,78 @@ import sys
22import tempfile 22import tempfile
23import shutil 23import shutil
24import mimetypes 24import mimetypes
25import argparse
25 26
26if len(sys.argv) < 2: 27parser = argparse.ArgumentParser(description="Convert override syntax")
27 print("Please specify a directory to run the conversion script against.") 28parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
28 sys.exit(1) 29parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
30parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
31parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
32parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
33parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
34parser.add_argument("path", nargs="+", help="Paths to convert")
35
36args = parser.parse_args()
29 37
30# List of strings to treat as overrides 38# List of strings to treat as overrides
31vars = ["append", "prepend", "remove"] 39vars = args.override
32vars = vars + ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"] 40vars += ["append", "prepend", "remove"]
33vars = vars + ["genericx86", "edgerouter", "beaglebone-yocto"] 41vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
34vars = vars + ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"] 42vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
35vars = vars + ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"] 43vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
36vars = vars + ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"] 44vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
37vars = vars + ["tune-", "pn-", "forcevariable"] 45vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
38vars = vars + ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"] 46vars += ["tune-", "pn-", "forcevariable"]
39vars = vars + ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"] 47vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
40vars = vars + ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"] 48vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
41vars = vars + ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"] 49vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
42vars = vars + ["linux-gnueabi", "eabi"] 50vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
43vars = vars + ["virtclass-multilib", "virtclass-mcextend"] 51vars += ["linux-gnueabi", "eabi"]
52vars += ["virtclass-multilib", "virtclass-mcextend"]
44 53
45# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching). 54# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
46# Handles issues with arc matching arch. 55# Handles issues with arc matching arch.
47shortvars = ["arc", "mips", "mipsel", "sh4"] 56shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override
48 57
49# Variables which take packagenames as an override 58# Variables which take packagenames as an override
50packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY", 59packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
51 "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE", 60 "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
52 "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst", 61 "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
53 "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA", 62 "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
54 "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] 63 "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars
55 64
56# Expressions to skip if encountered, these are not overrides 65# Expressions to skip if encountered, these are not overrides
57skips = ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"] 66skips = args.skip
58skips = skips + ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"] 67skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
59skips = skips + ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"] 68skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
60skips = skips + ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"] 69skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
61skips = skips + ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"] 70skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
62skips = skips + ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"] 71skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
63skips = skips + ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"] 72skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
64skips = skips + ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"] 73skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
65skips = skips + ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"] 74skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
66 75skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
67imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] 76
68packagevars = packagevars + imagevars 77imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
78packagevars += imagevars
79
80skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext
69 81
70vars_re = {} 82vars_re = {}
71for exp in vars: 83for exp in vars:
72 vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp) 84 vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
73 85
74shortvars_re = {} 86shortvars_re = {}
75for exp in shortvars: 87for exp in shortvars:
76 shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3") 88 shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")
77 89
78package_re = {} 90package_re = {}
79for exp in packagevars: 91for exp in packagevars:
80 package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2") 92 package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
81 93
82# Other substitutions to make 94# Other substitutions to make
83subs = { 95subs = {
84 'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")', 96 'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
85 "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))", 97 "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
86 "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))", 98 "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
87 "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))", 99 "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
@@ -124,21 +136,20 @@ def processfile(fn):
124ourname = os.path.basename(sys.argv[0]) 136ourname = os.path.basename(sys.argv[0])
125ourversion = "0.9.3" 137ourversion = "0.9.3"
126 138
127if os.path.isfile(sys.argv[1]): 139for p in args.path:
128 processfile(sys.argv[1]) 140 if os.path.isfile(p):
129 sys.exit(0) 141 processfile(p)
130 142 else:
131for targetdir in sys.argv[1:]: 143 print("processing directory '%s'" % p)
132 print("processing directory '%s'" % targetdir) 144 for root, dirs, files in os.walk(p):
133 for root, dirs, files in os.walk(targetdir): 145 for name in files:
134 for name in files: 146 if name == ourname:
135 if name == ourname: 147 continue
136 continue 148 fn = os.path.join(root, name)
137 fn = os.path.join(root, name) 149 if os.path.islink(fn):
138 if os.path.islink(fn): 150 continue
139 continue 151 if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
140 if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"): 152 continue
141 continue 153 processfile(fn)
142 processfile(fn)
143 154
144print("All files processed with version %s" % ourversion) 155print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
new file mode 100755
index 0000000000..13cf12a33f
--- /dev/null
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -0,0 +1,145 @@
1#!/usr/bin/env python3
2#
3# Conversion script to change LICENSE entries to SPDX identifiers
4#
5# Copyright (C) 2021-2022 Richard Purdie
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re
11import os
12import sys
13import tempfile
14import shutil
15import mimetypes
16
17if len(sys.argv) < 2:
18 print("Please specify a directory to run the conversion script against.")
19 sys.exit(1)
20
21license_map = {
22"AGPL-3" : "AGPL-3.0-only",
23"AGPL-3+" : "AGPL-3.0-or-later",
24"AGPLv3" : "AGPL-3.0-only",
25"AGPLv3+" : "AGPL-3.0-or-later",
26"AGPLv3.0" : "AGPL-3.0-only",
27"AGPLv3.0+" : "AGPL-3.0-or-later",
28"AGPL-3.0" : "AGPL-3.0-only",
29"AGPL-3.0+" : "AGPL-3.0-or-later",
30"BSD-0-Clause" : "0BSD",
31"GPL-1" : "GPL-1.0-only",
32"GPL-1+" : "GPL-1.0-or-later",
33"GPLv1" : "GPL-1.0-only",
34"GPLv1+" : "GPL-1.0-or-later",
35"GPLv1.0" : "GPL-1.0-only",
36"GPLv1.0+" : "GPL-1.0-or-later",
37"GPL-1.0" : "GPL-1.0-only",
38"GPL-1.0+" : "GPL-1.0-or-later",
39"GPL-2" : "GPL-2.0-only",
40"GPL-2+" : "GPL-2.0-or-later",
41"GPLv2" : "GPL-2.0-only",
42"GPLv2+" : "GPL-2.0-or-later",
43"GPLv2.0" : "GPL-2.0-only",
44"GPLv2.0+" : "GPL-2.0-or-later",
45"GPL-2.0" : "GPL-2.0-only",
46"GPL-2.0+" : "GPL-2.0-or-later",
47"GPL-3" : "GPL-3.0-only",
48"GPL-3+" : "GPL-3.0-or-later",
49"GPLv3" : "GPL-3.0-only",
50"GPLv3+" : "GPL-3.0-or-later",
51"GPLv3.0" : "GPL-3.0-only",
52"GPLv3.0+" : "GPL-3.0-or-later",
53"GPL-3.0" : "GPL-3.0-only",
54"GPL-3.0+" : "GPL-3.0-or-later",
55"LGPLv2" : "LGPL-2.0-only",
56"LGPLv2+" : "LGPL-2.0-or-later",
57"LGPLv2.0" : "LGPL-2.0-only",
58"LGPLv2.0+" : "LGPL-2.0-or-later",
59"LGPL-2.0" : "LGPL-2.0-only",
60"LGPL-2.0+" : "LGPL-2.0-or-later",
61"LGPL2.1" : "LGPL-2.1-only",
62"LGPL2.1+" : "LGPL-2.1-or-later",
63"LGPLv2.1" : "LGPL-2.1-only",
64"LGPLv2.1+" : "LGPL-2.1-or-later",
65"LGPL-2.1" : "LGPL-2.1-only",
66"LGPL-2.1+" : "LGPL-2.1-or-later",
67"LGPLv3" : "LGPL-3.0-only",
68"LGPLv3+" : "LGPL-3.0-or-later",
69"LGPL-3.0" : "LGPL-3.0-only",
70"LGPL-3.0+" : "LGPL-3.0-or-later",
71"MPL-1" : "MPL-1.0",
72"MPLv1" : "MPL-1.0",
73"MPLv1.1" : "MPL-1.1",
74"MPLv2" : "MPL-2.0",
75"MIT-X" : "MIT",
76"MIT-style" : "MIT",
77"openssl" : "OpenSSL",
78"PSF" : "PSF-2.0",
79"PSFv2" : "PSF-2.0",
80"Python-2" : "Python-2.0",
81"Apachev2" : "Apache-2.0",
82"Apache-2" : "Apache-2.0",
83"Artisticv1" : "Artistic-1.0",
84"Artistic-1" : "Artistic-1.0",
85"AFL-2" : "AFL-2.0",
86"AFL-1" : "AFL-1.2",
87"AFLv2" : "AFL-2.0",
88"AFLv1" : "AFL-1.2",
89"CDDLv1" : "CDDL-1.0",
90"CDDL-1" : "CDDL-1.0",
91"EPLv1.0" : "EPL-1.0",
92"FreeType" : "FTL",
93"Nauman" : "Naumen",
94"tcl" : "TCL",
95"vim" : "Vim",
96"SGIv1" : "SGI-OpenGL",
97}
98
99def processfile(fn):
100 print("processing file '%s'" % fn)
101 try:
102 fh, abs_path = tempfile.mkstemp()
103 modified = False
104 with os.fdopen(fh, 'w') as new_file:
105 with open(fn, "r") as old_file:
106 for line in old_file:
107 if not line.startswith("LICENSE"):
108 new_file.write(line)
109 continue
110 orig = line
111 for license in sorted(license_map, key=len, reverse=True):
112 for ending in ['"', "'", " ", ")"]:
113 line = line.replace(license + ending, license_map[license] + ending)
114 if orig != line:
115 modified = True
116 new_file.write(line)
117 new_file.close()
118 if modified:
119 shutil.copymode(fn, abs_path)
120 os.remove(fn)
121 shutil.move(abs_path, fn)
122 except UnicodeDecodeError:
123 pass
124
125ourname = os.path.basename(sys.argv[0])
126ourversion = "0.01"
127
128if os.path.isfile(sys.argv[1]):
129 processfile(sys.argv[1])
130 sys.exit(0)
131
132for targetdir in sys.argv[1:]:
133 print("processing directory '%s'" % targetdir)
134 for root, dirs, files in os.walk(targetdir):
135 for name in files:
136 if name == ourname:
137 continue
138 fn = os.path.join(root, name)
139 if os.path.islink(fn):
140 continue
141 if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
142 continue
143 processfile(fn)
144
145print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
new file mode 100755
index 0000000000..eded90ca61
--- /dev/null
+++ b/scripts/contrib/convert-variable-renames.py
@@ -0,0 +1,116 @@
1#!/usr/bin/env python3
2#
3# Conversion script to rename variables to versions with improved terminology.
4# Also highlights potentially problematic language and removed variables.
5#
6# Copyright (C) 2021 Richard Purdie
7# Copyright (C) 2022 Wind River Systems, Inc.
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import re
13import os
14import sys
15import tempfile
16import shutil
17import mimetypes
18
19if len(sys.argv) < 2:
20 print("Please specify a directory to run the conversion script against.")
21 sys.exit(1)
22
23renames = {
24"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
25"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
26"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
27"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
28"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
29"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
30"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
31"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
32"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
33"PNBLACKLIST" : "SKIP_RECIPE",
34"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
35"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
36"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
37"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
38"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
39"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
40"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
41"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
42"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
43"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
44"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
45"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
46}
47
48removed_list = [
49"BB_STAMP_WHITELIST",
50"BB_STAMP_POLICY",
51"INHERIT_BLACKLIST",
52"TUNEABI_WHITELIST",
53]
54
55context_check_list = [
56"blacklist",
57"whitelist",
58"abort",
59]
60
61def processfile(fn):
62
63 print("processing file '%s'" % fn)
64 try:
65 fh, abs_path = tempfile.mkstemp()
66 modified = False
67 with os.fdopen(fh, 'w') as new_file:
68 with open(fn, "r") as old_file:
69 lineno = 0
70 for line in old_file:
71 lineno += 1
72 if not line or "BB_RENAMED_VARIABLE" in line:
73 continue
74 # Do the renames
75 for old_name, new_name in renames.items():
76 if old_name in line:
77 line = line.replace(old_name, new_name)
78 modified = True
79 # Find removed names
80 for removed_name in removed_list:
81 if removed_name in line:
82 print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
83 for check_word in context_check_list:
84 if re.search(check_word, line, re.IGNORECASE):
85 print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
86 new_file.write(line)
87 new_file.close()
88 if modified:
89 print("*** Modified file '%s'" % (fn))
90 shutil.copymode(fn, abs_path)
91 os.remove(fn)
92 shutil.move(abs_path, fn)
93 except UnicodeDecodeError:
94 pass
95
96ourname = os.path.basename(sys.argv[0])
97ourversion = "0.1"
98
99if os.path.isfile(sys.argv[1]):
100 processfile(sys.argv[1])
101 sys.exit(0)
102
103for targetdir in sys.argv[1:]:
104 print("processing directory '%s'" % targetdir)
105 for root, dirs, files in os.walk(targetdir):
106 for name in files:
107 if name == ourname:
108 continue
109 fn = os.path.join(root, name)
110 if os.path.islink(fn):
111 continue
112 if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
113 continue
114 processfile(fn)
115
116print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
index 7f2ad112a6..70eee8ebea 100755
--- a/scripts/contrib/ddimage
+++ b/scripts/contrib/ddimage
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
index ad6070c369..82c84baa1d 100755
--- a/scripts/contrib/dialog-power-control
+++ b/scripts/contrib/dialog-power-control
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Simple script to show a manual power prompt for when you want to use 7# Simple script to show a manual power prompt for when you want to use
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index f436f9bae0..7197a2fcea 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Perform an audit of which packages provide documentation and which 7# Perform an audit of which packages provide documentation and which
@@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then
26fi 28fi
27 29
28echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" 30echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
29echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " 31echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
30echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\"" 32echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
31 33
32for pkg in `bitbake -s | awk '{ print \$1 }'`; do 34for pkg in `bitbake -s | awk '{ print \$1 }'`; do
33 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || 35 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
index 3c07a73a4e..4d65a99258 100755
--- a/scripts/contrib/image-manifest
+++ b/scripts/contrib/image-manifest
@@ -392,7 +392,7 @@ def export_manifest_info(args):
392 for key in rd.getVarFlags('PACKAGECONFIG').keys(): 392 for key in rd.getVarFlags('PACKAGECONFIG').keys():
393 if key == 'doc': 393 if key == 'doc':
394 continue 394 continue
395 rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True) 395 rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
396 396
397 if config['patches'] == 'yes': 397 if config['patches'] == 'yes':
398 patches = oe.recipeutils.get_recipe_patches(rd) 398 patches = oe.recipeutils.get_recipe_patches(rd)
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py
new file mode 100755
index 0000000000..829cc4cd30
--- /dev/null
+++ b/scripts/contrib/improve_kernel_cve_report.py
@@ -0,0 +1,467 @@
1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# The script uses another source of CVE information from linux-vulns
6# to enrich the cve-summary from cve-check or vex.
7# It can also use the list of compiled files from the kernel SPDX to ignore CVEs
8# whose affected source files are not compiled into the build.
9#
10# It creates a new json file with updated CVE information
11#
12# Compiled files can be extracted adding the following in local.conf
13# SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1"
14#
15# Tested with the following CVE sources:
16# - https://git.kernel.org/pub/scm/linux/security/vulns.git
17# - https://github.com/CVEProject/cvelistV5
18#
19# Example:
20# python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns
21# python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json
22#
23# SPDX-License-Identifier: GPL-2.0-only
24
25import argparse
26import json
27import sys
28import logging
29import glob
30import os
31import pathlib
32from packaging.version import Version
33
34def is_linux_cve(cve_info):
35	 '''Return True if the CVE belongs to Linux'''
36 if not "affected" in cve_info["containers"]["cna"]:
37 return False
38 for affected in cve_info["containers"]["cna"]["affected"]:
39 if not "product" in affected:
40 return False
41 if affected["product"] == "Linux" and affected["vendor"] == "Linux":
42 return True
43 return False
44
45def get_kernel_cves(datadir, compiled_files, version):
46 """
47 Get CVEs for the kernel
48 """
49 cves = {}
50
51 check_config = len(compiled_files) > 0
52
53 base_version = Version(f"{version.major}.{version.minor}")
54
55 # Check all CVES from kernel vulns
56 pattern = os.path.join(datadir, '**', "CVE-*.json")
57 cve_files = glob.glob(pattern, recursive=True)
58 not_applicable_config = 0
59 fixed_as_later_backport = 0
60 vulnerable = 0
61 not_vulnerable = 0
62 for cve_file in sorted(cve_files):
63 cve_info = {}
64 with open(cve_file, "r", encoding='ISO-8859-1') as f:
65 cve_info = json.load(f)
66
67 if len(cve_info) == 0:
68 logging.error("Not valid data in %s. Aborting", cve_file)
69 break
70
71 if not is_linux_cve(cve_info):
72 continue
73 cve_id = os.path.basename(cve_file)[:-5]
74 description = cve_info["containers"]["cna"]["descriptions"][0]["value"]
75 if cve_file.find("rejected") >= 0:
76 logging.debug("%s is rejected by the CNA", cve_id)
77 cves[cve_id] = {
78 "id": cve_id,
79 "status": "Ignored",
80 "detail": "rejected",
81 "summary": description,
82 "description": f"Rejected by CNA"
83 }
84 continue
85	 if any(elem in cve_file for elem in ["review", "reserved", "testing"]):
86 continue
87
88 is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version)
89
90 logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected)
91
92 if is_vulnerable is None:
93 logging.warning("%s doesn't have good metadata", cve_id)
94 if is_vulnerable:
95 is_affected = True
96 affected_files = []
97 if check_config:
98 is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info)
99
100 if not is_affected and len(affected_files) > 0:
101 logging.debug(
102 "%s - not applicable configuration since affected files not compiled: %s",
103 cve_id, affected_files)
104 cves[cve_id] = {
105 "id": cve_id,
106 "status": "Ignored",
107 "detail": "not-applicable-config",
108 "summary": description,
109 "description": f"Source code not compiled by config. {affected_files}"
110 }
111 not_applicable_config +=1
112 # Check if we have backport
113 else:
114 if not better_match_last:
115 fixed_in = last_affected
116 else:
117 fixed_in = better_match_last
118 logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in)
119 cves[cve_id] = {
120 "id": cve_id,
121 "status": "Unpatched",
122 "detail": "version-in-range",
123 "summary": description,
124 "description": f"Needs backporting (fixed from {fixed_in})"
125 }
126 vulnerable += 1
127 if (better_match_last and
128 Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version):
129 fixed_as_later_backport += 1
130 # Not vulnerable
131 else:
132 if not first_affected:
133 logging.debug("%s - not known affected %s",
134 cve_id,
135 better_match_last)
136 cves[cve_id] = {
137 "id": cve_id,
138 "status": "Patched",
139 "detail": "version-not-in-range",
140 "summary": description,
141 "description": "No CPE match"
142 }
143 not_vulnerable += 1
144 continue
145 backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}")
146 if version < first_affected:
147 logging.debug('%s - fixed-version: only affects %s onwards',
148 cve_id,
149 first_affected)
150 cves[cve_id] = {
151 "id": cve_id,
152 "status": "Patched",
153 "detail": "fixed-version",
154 "summary": description,
155 "description": f"only affects {first_affected} onwards"
156 }
157 not_vulnerable += 1
158 elif last_affected <= version:
159 logging.debug("%s - fixed-version: Fixed from version %s",
160 cve_id,
161 last_affected)
162 cves[cve_id] = {
163 "id": cve_id,
164 "status": "Patched",
165 "detail": "fixed-version",
166 "summary": description,
167 "description": f"fixed-version: Fixed from version {last_affected}"
168 }
169 not_vulnerable += 1
170 elif backport_base == base_version:
171 logging.debug("%s - cpe-stable-backport: Backported in %s",
172 cve_id,
173 better_match_last)
174 cves[cve_id] = {
175 "id": cve_id,
176 "status": "Patched",
177 "detail": "cpe-stable-backport",
178 "summary": description,
179 "description": f"Backported in {better_match_last}"
180 }
181 not_vulnerable += 1
182 else:
183 logging.debug("%s - version not affected %s", cve_id, str(affected_versions))
184 cves[cve_id] = {
185 "id": cve_id,
186 "status": "Patched",
187 "detail": "version-not-in-range",
188 "summary": description,
189 "description": f"Range {affected_versions}"
190 }
191 not_vulnerable += 1
192
193 logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config)
194 logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable)
195 logging.info("Total vulnerable CVEs: %d", vulnerable)
196
197 logging.info("Total CVEs already backported in %s: %s", base_version,
198 fixed_as_later_backport)
199 return cves
200
201def read_spdx(spdx_file):
202 '''Open SPDX file and extract compiled files'''
203 with open(spdx_file, 'r', encoding='ISO-8859-1') as f:
204 spdx = json.load(f)
205 if "spdxVersion" in spdx:
206 if spdx["spdxVersion"] == "SPDX-2.2":
207 return read_spdx2(spdx)
208 if "@graph" in spdx:
209 return read_spdx3(spdx)
210 return []
211
212def read_spdx2(spdx):
213 '''
214 Read spdx2 compiled files from spdx
215 '''
216 cfiles = set()
217 if 'files' not in spdx:
218 return cfiles
219 for item in spdx['files']:
220 for ftype in item['fileTypes']:
221 if ftype == "SOURCE":
222 filename = item["fileName"][item["fileName"].find("/")+1:]
223 cfiles.add(filename)
224 return cfiles
225
226def read_spdx3(spdx):
227 '''
228 Read spdx3 compiled files from spdx
229 '''
230 cfiles = set()
231 for item in spdx["@graph"]:
232 if "software_primaryPurpose" not in item:
233 continue
234 if item["software_primaryPurpose"] == "source":
235 filename = item['name'][item['name'].find("/")+1:]
236 cfiles.add(filename)
237 return cfiles
238
239def check_kernel_compiled_files(compiled_files, cve_info):
240 """
241 Return if a CVE affected us depending on compiled files
242 """
243 files_affected = set()
244 is_affected = False
245
246 for item in cve_info['containers']['cna']['affected']:
247 if "programFiles" in item:
248 for f in item['programFiles']:
249 if f not in files_affected:
250 files_affected.add(f)
251
252 if len(files_affected) > 0:
253 for f in files_affected:
254 if f in compiled_files:
255 logging.debug("File match: %s", f)
256 is_affected = True
257 return is_affected, files_affected
258
259def get_cpe_applicability(cve_info, v):
260 '''
261 Check if version is affected and return affected versions
262 '''
263 base_branch = Version(f"{v.major}.{v.minor}")
264 affected = []
265 if not 'cpeApplicability' in cve_info["containers"]["cna"]:
266 return None, None, None, None, None, None
267
268 for nodes in cve_info["containers"]["cna"]["cpeApplicability"]:
269 for node in nodes.values():
270 vulnerable = False
271 matched_branch = False
272 first_affected = Version("5000")
273 last_affected = Version("0")
274 better_match_first = Version("0")
275 better_match_last = Version("5000")
276
277 if len(node[0]['cpeMatch']) == 0:
278 first_affected = None
279 last_affected = None
280 better_match_first = None
281 better_match_last = None
282
283 for cpe_match in node[0]['cpeMatch']:
284 version_start_including = Version("0")
285 version_end_excluding = Version("0")
286 if 'versionStartIncluding' in cpe_match:
287 version_start_including = Version(cpe_match['versionStartIncluding'])
288 else:
289 version_start_including = Version("0")
290 # if versionEndExcluding is missing we are in a branch, which is not fixed.
291 if "versionEndExcluding" in cpe_match:
292 version_end_excluding = Version(cpe_match["versionEndExcluding"])
293 else:
294 # if versionEndExcluding is missing we are in a branch, which is not fixed.
295 version_end_excluding = Version(
296 f"{version_start_including.major}.{version_start_including.minor}.5000"
297 )
298 affected.append(f" {version_start_including}-{version_end_excluding}")
299 # Detect if versionEnd is in fixed in base branch. It has precedence over the rest
300 branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}")
301 if branch_end == base_branch:
302 if version_start_including <= v < version_end_excluding:
303 vulnerable = cpe_match['vulnerable']
304 # If we don't match in our branch, we are not vulnerable,
305 # since we have a backport
306 matched_branch = True
307 better_match_first = version_start_including
308 better_match_last = version_end_excluding
309 if version_start_including <= v < version_end_excluding and not matched_branch:
310 if version_end_excluding < better_match_last:
311 better_match_first = max(version_start_including, better_match_first)
312 better_match_last = min(better_match_last, version_end_excluding)
313 vulnerable = cpe_match['vulnerable']
314 matched_branch = True
315
316 first_affected = min(version_start_including, first_affected)
317 last_affected = max(version_end_excluding, last_affected)
318 # Not a better match, we use the first and last affected instead of the fake .5000
319 if vulnerable and better_match_last == Version(f"{base_branch}.5000"):
320 better_match_last = last_affected
321 better_match_first = first_affected
322 return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected
323
324def copy_data(old, new):
325 '''Update dictionary with new entries, while keeping the old ones'''
326 for k in new.keys():
327 old[k] = new[k]
328 return old
329
330# Function taken from cve_check.bbclass. Adapted to cve fields
331def cve_update(cve_data, cve, entry):
332 # If no entry, just add it
333 if cve not in cve_data:
334 cve_data[cve] = entry
335 return
336 # If we are updating, there might be change in the status
337 if cve_data[cve]['status'] == "Unknown":
338 cve_data[cve] = copy_data(cve_data[cve], entry)
339 return
340 if cve_data[cve]['status'] == entry['status']:
341 return
342 if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched":
343 logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve)
344 cve_data[cve] = copy_data(cve_data[cve], entry)
345 return
346 if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched":
347 logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve)
348 cve_data[cve] = copy_data(cve_data[cve], entry)
349 return
350 # If we have an "Ignored", it has a priority
351 if cve_data[cve]['status'] == "Ignored":
352 logging.debug("CVE %s not updating because Ignored", cve)
353 return
354 # If we have an "Ignored", it has a priority
355 if entry['status'] == "Ignored":
356 cve_data[cve] = copy_data(cve_data[cve], entry)
357 logging.debug("CVE entry %s updated from Unpatched to Ignored", cve)
358 return
359 logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s",
360 cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail'])
361
362def main():
363 parser = argparse.ArgumentParser(
364 description="Update cve-summary with kernel compiled files and kernel CVE information"
365 )
366 parser.add_argument(
367 "-s",
368 "--spdx",
369 help="SPDX2/3 for the kernel. Needs to include compiled sources",
370 )
371 parser.add_argument(
372 "--datadir",
373 type=pathlib.Path,
374 help="Directory where CVE data is",
375 required=True
376 )
377 parser.add_argument(
378 "--old-cve-report",
379 help="CVE report to update. (Optional)",
380 )
381 parser.add_argument(
382 "--kernel-version",
383 help="Kernel version. Needed if old cve_report is not provided (Optional)",
384 type=Version
385 )
386 parser.add_argument(
387 "--new-cve-report",
388 help="Output file",
389 default="cve-summary-enhance.json"
390 )
391 parser.add_argument(
392 "-D",
393 "--debug",
394	 help='Enable debug',
395 action="store_true")
396
397 args = parser.parse_args()
398
399 if args.debug:
400 log_level=logging.DEBUG
401 else:
402 log_level=logging.INFO
403 logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level)
404
405 if not args.kernel_version and not args.old_cve_report:
406	 parser.error("either --kernel-version or --old-cve-report is needed")
407 return -1
408
409 # by default we don't check the compiled files, unless provided
410 compiled_files = []
411 if args.spdx:
412 compiled_files = read_spdx(args.spdx)
413 logging.info("Total compiled files %d", len(compiled_files))
414
415 if args.old_cve_report:
416 with open(args.old_cve_report, encoding='ISO-8859-1') as f:
417 cve_report = json.load(f)
418 else:
419 #If summary not provided, we create one
420 cve_report = {
421 "version": "1",
422 "package": [
423 {
424 "name": "linux-yocto",
425 "version": str(args.kernel_version),
426 "products": [
427 {
428 "product": "linux_kernel",
429 "cvesInRecord": "Yes"
430 }
431 ],
432 "issue": []
433 }
434 ]
435 }
436
437 for pkg in cve_report['package']:
438 is_kernel = False
439 for product in pkg['products']:
440 if product['product'] == "linux_kernel":
441 is_kernel=True
442 if not is_kernel:
443 continue
444
445 kernel_cves = get_kernel_cves(args.datadir,
446 compiled_files,
447 Version(pkg["version"]))
448 logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves))
449 cves = {issue["id"]: issue for issue in pkg["issue"]}
450 logging.info("Total kernel before processing cves: %s", len(cves))
451
452 for cve in kernel_cves:
453 cve_update(cves, cve, kernel_cves[cve])
454
455 pkg["issue"] = []
456 for cve in sorted(cves):
457 pkg["issue"].extend([cves[cve]])
458 logging.info("Total kernel cves after processing: %s", len(pkg['issue']))
459
460 with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f:
461 json.dump(cve_report, f, indent=2)
462
463 return 0
464
465if __name__ == "__main__":
466 sys.exit(main())
467
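A quick illustration of the cve_update() merge precedence, assuming the functions above are in scope (the CVE id and entries here are invented): an "Ignored" entry always wins over an earlier "Unpatched" one.

```python
cve_data = {"CVE-2024-0001": {"status": "Unpatched", "detail": "version-in-range"}}
# Ignored takes priority, so the merged entry flips to Ignored.
cve_update(cve_data, "CVE-2024-0001",
           {"status": "Ignored", "detail": "not-applicable-config"})
print(cve_data["CVE-2024-0001"]["status"])  # -> Ignored
```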
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh
new file mode 100755
index 0000000000..31caaf339d
--- /dev/null
+++ b/scripts/contrib/make-spdx-bindings.sh
@@ -0,0 +1,12 @@
1#! /bin/sh
2#
3# SPDX-License-Identifier: MIT
4
5THIS_DIR="$(dirname "$0")"
6
7VERSION="3.0.1"
8
9shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \
10 --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \
11 --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \
12 python -o $THIS_DIR/../../meta/lib/oe/spdx30.py
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
index de3862c897..7192113c28 100755
--- a/scripts/contrib/oe-build-perf-report-email.py
+++ b/scripts/contrib/oe-build-perf-report-email.py
@@ -19,8 +19,6 @@ import socket
19import subprocess 19import subprocess
20import sys 20import sys
21import tempfile 21import tempfile
22from email.mime.image import MIMEImage
23from email.mime.multipart import MIMEMultipart
24from email.mime.text import MIMEText 22from email.mime.text import MIMEText
25 23
26 24
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
29log = logging.getLogger('oe-build-perf-report') 27log = logging.getLogger('oe-build-perf-report')
30 28
31 29
32# Find js scaper script
33SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
34 'scrape-html-report.js')
35if not os.path.isfile(SCRAPE_JS):
36 log.error("Unableto find oe-build-perf-report-scrape.js")
37 sys.exit(1)
38
39
40class ReportError(Exception):
41 """Local errors"""
42 pass
43
44
45def check_utils():
46 """Check that all needed utils are installed in the system"""
47 missing = []
48 for cmd in ('phantomjs', 'optipng'):
49 if not shutil.which(cmd):
50 missing.append(cmd)
51 if missing:
52 log.error("The following tools are missing: %s", ' '.join(missing))
53 sys.exit(1)
54
55
56def parse_args(argv): 30def parse_args(argv):
57 """Parse command line arguments""" 31 """Parse command line arguments"""
58 description = """Email build perf test report""" 32 description = """Email build perf test report"""
@@ -77,137 +51,19 @@ def parse_args(argv):
77 "the email parts") 51 "the email parts")
78 parser.add_argument('--text', 52 parser.add_argument('--text',
79 help="Plain text message") 53 help="Plain text message")
80 parser.add_argument('--html',
81 help="HTML peport generated by oe-build-perf-report")
82 parser.add_argument('--phantomjs-args', action='append',
83 help="Extra command line arguments passed to PhantomJS")
84 54
85 args = parser.parse_args(argv) 55 args = parser.parse_args(argv)
86 56
87 if not args.html and not args.text: 57 if not args.text:
88 parser.error("Please specify --html and/or --text") 58 parser.error("Please specify --text")
89 59
90 return args 60 return args
91 61
92 62
93def decode_png(infile, outfile): 63def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
94 """Parse/decode/optimize png data from a html element"""
95 with open(infile) as f:
96 raw_data = f.read()
97
98 # Grab raw base64 data
99 b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1)
100 b64_data = re.sub('">.+$', '', b64_data, 1)
101
102 # Replace file with proper decoded png
103 with open(outfile, 'wb') as f:
104 f.write(base64.b64decode(b64_data))
105
106 subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
107
108
109def mangle_html_report(infile, outfile, pngs):
110 """Mangle html file into a email compatible format"""
111 paste = True
112 png_dir = os.path.dirname(outfile)
113 with open(infile) as f_in:
114 with open(outfile, 'w') as f_out:
115 for line in f_in.readlines():
116 stripped = line.strip()
117 # Strip out scripts
118 if stripped == '<!--START-OF-SCRIPTS-->':
119 paste = False
120 elif stripped == '<!--END-OF-SCRIPTS-->':
121 paste = True
122 elif paste:
123 if re.match('^.+href="data:image/png;base64', stripped):
124 # Strip out encoded pngs (as they're huge in size)
125 continue
126 elif 'www.gstatic.com' in stripped:
127 # HACK: drop references to external static pages
128 continue
129
130 # Replace charts with <img> elements
131 match = re.match('<div id="(?P<id>\w+)"', stripped)
132 if match and match.group('id') in pngs:
133 f_out.write('<img src="cid:{}"\n'.format(match.group('id')))
134 else:
135 f_out.write(line)
136
137
138def scrape_html_report(report, outdir, phantomjs_extra_args=None):
139 """Scrape html report into a format sendable by email"""
140 tmpdir = tempfile.mkdtemp(dir='.')
141 log.debug("Using tmpdir %s for phantomjs output", tmpdir)
142
143 if not os.path.isdir(outdir):
144 os.mkdir(outdir)
145 if os.path.splitext(report)[1] not in ('.html', '.htm'):
146 raise ReportError("Invalid file extension for report, needs to be "
147 "'.html' or '.htm'")
148
149 try:
150 log.info("Scraping HTML report with PhangomJS")
151 extra_args = phantomjs_extra_args if phantomjs_extra_args else []
152 subprocess.check_output(['phantomjs', '--debug=true'] + extra_args +
153 [SCRAPE_JS, report, tmpdir],
154 stderr=subprocess.STDOUT)
155
156 pngs = []
157 images = []
158 for fname in os.listdir(tmpdir):
159 base, ext = os.path.splitext(fname)
160 if ext == '.png':
161 log.debug("Decoding %s", fname)
162 decode_png(os.path.join(tmpdir, fname),
163 os.path.join(outdir, fname))
164 pngs.append(base)
165 images.append(fname)
166 elif ext in ('.html', '.htm'):
167 report_file = fname
168 else:
169 log.warning("Unknown file extension: '%s'", ext)
170 #shutil.move(os.path.join(tmpdir, fname), outdir)
171
172 log.debug("Mangling html report file %s", report_file)
173 mangle_html_report(os.path.join(tmpdir, report_file),
174 os.path.join(outdir, report_file), pngs)
175 return (os.path.join(outdir, report_file),
176 [os.path.join(outdir, i) for i in images])
177 finally:
178 shutil.rmtree(tmpdir)
179
180def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
181 blind_copy=[]):
182 """Send email"""
183 # Generate email message 64 # Generate email message
184 text_msg = html_msg = None 65 with open(text_fn) as f:
185 if text_fn: 66 msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
186 with open(text_fn) as f:
187 text_msg = MIMEText("Yocto build performance test report.\n" +
188 f.read(), 'plain')
189 if html_fn:
190 html_msg = msg = MIMEMultipart('related')
191 with open(html_fn) as f:
192 html_msg.attach(MIMEText(f.read(), 'html'))
193 for img_fn in image_fns:
194 # Expect that content id is same as the filename
195 cid = os.path.splitext(os.path.basename(img_fn))[0]
196 with open(img_fn, 'rb') as f:
197 image_msg = MIMEImage(f.read())
198 image_msg['Content-ID'] = '<{}>'.format(cid)
199 html_msg.attach(image_msg)
200
201 if text_msg and html_msg:
202 msg = MIMEMultipart('alternative')
203 msg.attach(text_msg)
204 msg.attach(html_msg)
205 elif text_msg:
206 msg = text_msg
207 elif html_msg:
208 msg = html_msg
209 else:
210 raise ReportError("Neither plain text nor html body specified")
211 67
212 pw_data = pwd.getpwuid(os.getuid()) 68 pw_data = pwd.getpwuid(os.getuid())
213 full_name = pw_data.pw_gecos.split(',')[0] 69 full_name = pw_data.pw_gecos.split(',')[0]
@@ -234,8 +90,6 @@ def main(argv=None):
234 if args.debug: 90 if args.debug:
235 log.setLevel(logging.DEBUG) 91 log.setLevel(logging.DEBUG)
236 92
237 check_utils()
238
239 if args.outdir: 93 if args.outdir:
240 outdir = args.outdir 94 outdir = args.outdir
241 if not os.path.exists(outdir): 95 if not os.path.exists(outdir):
@@ -245,25 +99,16 @@ def main(argv=None):
245 99
246 try: 100 try:
247 log.debug("Storing email parts in %s", outdir) 101 log.debug("Storing email parts in %s", outdir)
248 html_report = images = None
249 if args.html:
250 html_report, images = scrape_html_report(args.html, outdir,
251 args.phantomjs_args)
252
253 if args.to: 102 if args.to:
254 log.info("Sending email to %s", ', '.join(args.to)) 103 log.info("Sending email to %s", ', '.join(args.to))
255 if args.cc: 104 if args.cc:
256 log.info("Copying to %s", ', '.join(args.cc)) 105 log.info("Copying to %s", ', '.join(args.cc))
257 if args.bcc: 106 if args.bcc:
258 log.info("Blind copying to %s", ', '.join(args.bcc)) 107 log.info("Blind copying to %s", ', '.join(args.bcc))
259 send_email(args.text, html_report, images, args.subject, 108 send_email(args.text, args.subject, args.to, args.cc, args.bcc)
260 args.to, args.cc, args.bcc)
261 except subprocess.CalledProcessError as err: 109 except subprocess.CalledProcessError as err:
262 log.error("%s, with output:\n%s", str(err), err.output.decode()) 110 log.error("%s, with output:\n%s", str(err), err.output.decode())
263 return 1 111 return 1
264 except ReportError as err:
265 log.error(err)
266 return 1
267 finally: 112 finally:
268 if not args.outdir: 113 if not args.outdir:
269 log.debug("Wiping %s", outdir) 114 log.debug("Wiping %s", outdir)
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore
new file mode 100644
index 0000000000..285851c984
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/.gitignore
@@ -0,0 +1,8 @@
1*.spdx.json
2*.pyc
3*.bak
4*.swp
5*.swo
6*.swn
7venv/*
8.venv/*
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md
new file mode 100644
index 0000000000..44f76eacd8
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/README.md
@@ -0,0 +1,24 @@
1# OE Image Files from SBoM
2
3This is an example python script that will list the packaged files with their
4checksums based on the SPDX 3.0.1 SBoM.
5
6It can be used as a template for other programs to investigate output based on
7OE SPDX SBoMs.
8
9## Installation
10
11This project can be installed using a virtual environment:
12```
13python3 -m venv .venv
14. .venv/bin/activate
15python3 -m pip install -e '.[dev]'
16```
17
18## Usage
19
20After installing, the `oe-image-files` program can be used to show the files, e.g.:
21
22```
23oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json
24```
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml
new file mode 100644
index 0000000000..3fab5dd605
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml
@@ -0,0 +1,23 @@
1[project]
2name = "oe-image-files"
3description = "Displays all packaged files on the root file system"
4dynamic = ["version"]
5requires-python = ">= 3.8"
6readme = "README.md"
7
8dependencies = [
9 "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179",
10]
11
12[project.scripts]
13oe-image-files = "oe_image_files:main"
14
15[build-system]
16requires = ["hatchling"]
17build-backend = "hatchling.build"
18
19[tool.hatch.version]
20path = "src/oe_image_files/version.py"
21
22[tool.hatch.metadata]
23allow-direct-references = true
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
new file mode 100644
index 0000000000..c28a133f2d
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
@@ -0,0 +1 @@
from .main import main
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
new file mode 100644
index 0000000000..8476bf6369
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
@@ -0,0 +1,86 @@
1# SPDX-License-Identifier: MIT
2
3import argparse
4from pathlib import Path
5
6
7from spdx_python_model import v3_0_1 as spdx_3_0_1
8from .version import VERSION
9
10
11def main():
12 parser = argparse.ArgumentParser(
13 description="Show the packaged files and checksums in an OE image from the SPDX SBoM"
14 )
15 parser.add_argument("file", help="SPDX 3 input file", type=Path)
16 parser.add_argument("--version", "-V", action="version", version=VERSION)
17
18 args = parser.parse_args()
19
20 # Load SPDX data from file into a new object set
21 objset = spdx_3_0_1.SHACLObjectSet()
22 with args.file.open("r") as f:
23 d = spdx_3_0_1.JSONLDDeserializer()
24 d.read(f, objset)
25
26 # Find the top level SPDX Document object
27 for o in objset.foreach_type(spdx_3_0_1.SpdxDocument):
28 doc = o
29 break
30 else:
31 print("ERROR: No SPDX Document found!")
32 return 1
33
34 # Find the root SBoM in the document
35 for o in doc.rootElement:
36 if isinstance(o, spdx_3_0_1.software_Sbom):
37 sbom = o
38 break
39 else:
40 print("ERROR: SBoM not found in document")
41 return 1
42
43 # Find the root file system package in the SBoM
44 for o in sbom.rootElement:
45 if (
46 isinstance(o, spdx_3_0_1.software_Package)
47 and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive
48 ):
49 root_package = o
50 break
51 else:
52 print("ERROR: Package not found in document")
53 return 1
54
55 # Find all relationships of type "contains" that go FROM the root file
56 # system
57 files = []
58 for rel in objset.foreach_type(spdx_3_0_1.Relationship):
59 if not rel.relationshipType == spdx_3_0_1.RelationshipType.contains:
60 continue
61
62 if not rel.from_ is root_package:
63 continue
64
65 # Iterate over all files in the TO of the relationship
66 for o in rel.to:
67 if not isinstance(o, spdx_3_0_1.software_File):
68 continue
69
70 # Find the SHA 256 hash of the file (if any)
71 for h in o.verifiedUsing:
72 if (
73 isinstance(h, spdx_3_0_1.Hash)
74 and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256
75 ):
76 files.append((o.name, h.hashValue))
77 break
78 else:
79 files.append((o.name, ""))
80
81 # Print files
82 files.sort(key=lambda x: x[0])
83 for name, hash_val in files:
84 print(f"{name} - {hash_val}")
85
86 return 0
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
new file mode 100644
index 0000000000..901e5110b2
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
@@ -0,0 +1 @@
VERSION = "0.0.1"
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index 62c509f51c..d8d7b214e5 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -1,14 +1,29 @@
1#! /usr/bin/env python3 1#! /usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
8import argparse
9import collections
10import json
11import os
12import os.path
13import pathlib
14import re
15import subprocess
16
17import sys
18sys.path.append(os.path.join(sys.path[0], '../../meta/lib'))
19import oe.qa
20
6# TODO 21# TODO
7# - option to just list all broken files 22# - option to just list all broken files
8# - test suite 23# - test suite
9# - validate signed-off-by 24# - validate signed-off-by
10 25
11status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied") 26status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
12 27
13class Result: 28class Result:
14 # Whether the patch has an Upstream-Status or not 29 # Whether the patch has an Upstream-Status or not
@@ -33,20 +48,18 @@ def blame_patch(patch):
33 From a patch filename, return a list of "commit summary (author name <author 48 From a patch filename, return a list of "commit summary (author name <author
34 email>)" strings representing the history. 49 email>)" strings representing the history.
35 """ 50 """
36 import subprocess
37 return subprocess.check_output(("git", "log", 51 return subprocess.check_output(("git", "log",
38 "--follow", "--find-renames", "--diff-filter=A", 52 "--follow", "--find-renames", "--diff-filter=A",
39 "--format=%s (%aN <%aE>)", 53 "--format=%s (%aN <%aE>)",
40 "--", patch)).decode("utf-8").splitlines() 54 "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines()
41 55
42def patchreview(path, patches): 56def patchreview(patches):
43 import re, os.path
44 57
45 # General pattern: start of line, optional whitespace, tag with optional 58 # General pattern: start of line, optional whitespace, tag with optional
46 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case 59 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
47 # insensitive. 60 # insensitive.
48 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) 61 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
49 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE) 62 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
50 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) 63 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
51 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) 64 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
52 65
@@ -54,11 +67,10 @@ def patchreview(path, patches):
54 67
55 for patch in patches: 68 for patch in patches:
56 69
57 fullpath = os.path.join(path, patch)
58 result = Result() 70 result = Result()
59 results[fullpath] = result 71 results[patch] = result
60 72
61 content = open(fullpath, encoding='ascii', errors='ignore').read() 73 content = open(patch, encoding='ascii', errors='ignore').read()
62 74
63 # Find the Signed-off-by tag 75 # Find the Signed-off-by tag
64 match = sob_re.search(content) 76 match = sob_re.search(content)
@@ -70,12 +82,11 @@ def patchreview(path, patches):
70 else: 82 else:
71 result.missing_sob = True 83 result.missing_sob = True
72 84
73
74 # Find the Upstream-Status tag 85 # Find the Upstream-Status tag
75 match = status_re.search(content) 86 match = status_re.search(content)
76 if match: 87 if match:
77 value = match.group(1) 88 value = oe.qa.check_upstream_status(patch)
78 if value != "Upstream-Status:": 89 if value:
79 result.malformed_upstream_status = value 90 result.malformed_upstream_status = value
80 91
81 value = match.group(2).lower() 92 value = match.group(2).lower()
@@ -191,29 +202,56 @@ Patches in Pending state: %s""" % (total_patches,
191def histogram(results): 202def histogram(results):
192 from toolz import recipes, dicttoolz 203 from toolz import recipes, dicttoolz
193 import math 204 import math
205
194 counts = recipes.countby(lambda r: r.upstream_status, results.values()) 206 counts = recipes.countby(lambda r: r.upstream_status, results.values())
195 bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) 207 bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
196 for k in bars: 208 for k in bars:
197 print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) 209 print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
198 210
211def find_layers(candidate):
212 # candidate can either be the path to a layer directly (eg meta-intel), or a
213 # repository that contains other layers (meta-arm). We can determine what by
214 # looking for a conf/layer.conf file. If that file exists then it's a layer,
215	 # otherwise it's a repository of layers and we can assume they're called
216 # meta-*.
217
218 if (candidate / "conf" / "layer.conf").exists():
219 return [candidate.absolute()]
220 else:
221 return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
222
223# TODO these don't actually handle dynamic-layers/
224
225def gather_patches(layers):
226 patches = []
227 for directory in layers:
228 filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split()
229 patches += [os.path.join(directory, f) for f in filenames]
230 return patches
231
232def count_recipes(layers):
233 count = 0
234 for directory in layers:
235 output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True)
236 count += len(output.splitlines())
237 return count
199 238
200if __name__ == "__main__": 239if __name__ == "__main__":
201 import argparse, subprocess, os
202
203 args = argparse.ArgumentParser(description="Patch Review Tool") 240 args = argparse.ArgumentParser(description="Patch Review Tool")
204 args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") 241 args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
205 args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") 242 args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
206 args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") 243 args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
207 args.add_argument("-j", "--json", help="update JSON") 244 args.add_argument("-j", "--json", help="update JSON")
208 args.add_argument("directory", help="directory to scan") 245 args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
209 args = args.parse_args() 246 args = args.parse_args()
210 247
211 patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split() 248 layers = find_layers(args.directory)
212 results = patchreview(args.directory, patches) 249 print(f"Found layers {' '.join((d.name for d in layers))}")
250 patches = gather_patches(layers)
251 results = patchreview(patches)
213 analyse(results, want_blame=args.blame, verbose=args.verbose) 252 analyse(results, want_blame=args.blame, verbose=args.verbose)
214 253
215 if args.json: 254 if args.json:
216 import json, os.path, collections
217 if os.path.isfile(args.json): 255 if os.path.isfile(args.json):
218 data = json.load(open(args.json)) 256 data = json.load(open(args.json))
219 else: 257 else:
@@ -221,7 +259,11 @@ if __name__ == "__main__":
221 259
222 row = collections.Counter() 260 row = collections.Counter()
223 row["total"] = len(results) 261 row["total"] = len(results)
224 row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip() 262 row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
263 row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
264 row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
265 row['recipe_count'] = count_recipes(layers)
266
225 for r in results.values(): 267 for r in results.values():
226 if r.upstream_status in status_values: 268 if r.upstream_status in status_values:
227 row[r.upstream_status] += 1 269 row[r.upstream_status] += 1
@@ -231,7 +273,7 @@ if __name__ == "__main__":
231 row['malformed-sob'] += 1 273 row['malformed-sob'] += 1
232 274
233 data.append(row) 275 data.append(row)
234 json.dump(data, open(args.json, "w")) 276 json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")
235 277
236 if args.histogram: 278 if args.histogram:
237 print() 279 print()
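With the changes above, each patchreview run appends one row to the JSON file carrying commit, commit_count and recipe_count next to the per-status counters. A minimal sketch of reading that output (the file name is illustrative):

import json

with open("patchreview.json") as f:
    data = json.load(f)

# Each row is a flattened Counter: totals plus one key per Upstream-Status value.
latest = data[-1]
print(latest["commit"], latest["total"], latest["recipe_count"])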
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 23f238adf6..4012ac7ba7 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then
97 exit 251 97 exit 251
98fi 98fi
99 99
100if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then 100if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended" 101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
102fi 102fi
103 103
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
index 478e8b0d03..a2879d2336 100755
--- a/scripts/contrib/test_build_time_worker.sh
+++ b/scripts/contrib/test_build_time_worker.sh
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# This is an example script to be used in conjunction with test_build_time.sh 7# This is an example script to be used in conjunction with test_build_time.sh
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index 7bffa78e23..a90b5010bc 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# This script can be used to verify HOMEPAGE values for all recipes in 7# This script can be used to verify HOMEPAGE values for all recipes in
diff --git a/scripts/cp-noerror b/scripts/cp-noerror
index ab617c5d35..13a098eee0 100755
--- a/scripts/cp-noerror
+++ b/scripts/cp-noerror
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation, 7# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation,
diff --git a/scripts/create-pull-request b/scripts/create-pull-request
index 8eefcf63a5..885105fab3 100755
--- a/scripts/create-pull-request
+++ b/scripts/create-pull-request
@@ -128,7 +128,7 @@ PROTO_RE="[a-z][a-z+]*://"
128GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)" 128GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)"
129REMOTE_URL=${REMOTE_URL%.git} 129REMOTE_URL=${REMOTE_URL%.git}
130REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#") 130REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#")
131REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#git://\4/\5#") 131REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#https://\4/\5#")
132 132
133if [ -z "$BRANCH" ]; then 133if [ -z "$BRANCH" ]; then
134 BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2) 134 BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2)
@@ -149,13 +149,10 @@ fi
149WEB_URL="" 149WEB_URL=""
150case "$REMOTE_URL" in 150case "$REMOTE_URL" in
151 *git.yoctoproject.org*) 151 *git.yoctoproject.org*)
152 WEB_URL="http://git.yoctoproject.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH" 152 WEB_URL="https://git.yoctoproject.org/$REMOTE_REPO/log/?h=$BRANCH"
153 ;;
154 *git.pokylinux.org*)
155 WEB_URL="http://git.pokylinux.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH"
156 ;; 153 ;;
157 *git.openembedded.org*) 154 *git.openembedded.org*)
158 WEB_URL="http://cgit.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH" 155 WEB_URL="https://git.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH"
159 ;; 156 ;;
160 *github.com*) 157 *github.com*)
161 WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH" 158 WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH"
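The sed rewrite above maps ssh/scp-style remotes onto https URLs; the same transformation expressed as a Python sketch (regex adapted from GIT_RE, sample URL invented):

import re

GIT_RE = re.compile(r'^(?:[a-z][a-z+]*://)?(?:[^@/]+@)?([^:/]+)[:/](.*)$')

def to_https(remote_url):
    # Strip a trailing .git, then split into host and repository path.
    remote_url = re.sub(r'\.git$', '', remote_url)
    host, repo = GIT_RE.match(remote_url).groups()
    return 'https://%s/%s' % (host, repo)

print(to_https('git@git.openembedded.org:openembedded-core.git'))
# -> https://git.openembedded.org/openembedded-core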
diff --git a/scripts/crosstap b/scripts/crosstap
index 73c8947442..5aa72f14d4 100755
--- a/scripts/crosstap
+++ b/scripts/crosstap
@@ -353,7 +353,7 @@ bitbake workspace.
353 353
354Anything after -- option is passed directly to stap. 354Anything after -- option is passed directly to stap.
355 355
356Legacy script invocation style supported but depreciated: 356Legacy script invocation style supported but deprecated:
357 %prog <user@hostname> <systemtap-script> [systemtap options] 357 %prog <user@hostname> <systemtap-script> [systemtap options]
358 358
359To enable most out of systemtap the following site.conf or local.conf 359To enable most out of systemtap the following site.conf or local.conf
diff --git a/scripts/cve-json-to-text.py b/scripts/cve-json-to-text.py
new file mode 100755
index 0000000000..8d309b37e5
--- /dev/null
+++ b/scripts/cve-json-to-text.py
@@ -0,0 +1,146 @@
1#!/usr/bin/env python3
2# SPDX-FileCopyrightText: OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5
6# CVE results conversion script: JSON format to text
7# Derived from cve-report.py from Oniro (MIT, by Huawei Inc)
8
9import sys
10import getopt
11
12infile = "in.json"
13outfile = "out.txt"
14
15
16def show_syntax_and_exit(code):
17 """
18 Show the program syntax and exit with an error
19 Arguments:
20 code: the error code to return
21 """
22 print("Syntax: %s [-h] [-i inputJSONfile][-o outputfile]" % sys.argv[0])
23 sys.exit(code)
24
25
26def exit_error(code, message):
27 """
28 Show the error message and exit with an error
29 Arguments:
30 code: the error code to return
31 message: the message to show
32 """
33 print("Error: %s" % message)
34 sys.exit(code)
35
36
37def parse_args(argv):
38 """
39 Parse the program arguments, put options in global variables
40 Arguments:
41 argv: program arguments
42 """
43 global infile, outfile
44 try:
45 opts, args = getopt.getopt(
46 argv, "hi:o:", ["help", "input", "output"]
47 )
48 except getopt.GetoptError:
49 show_syntax_and_exit(1)
50 for opt, arg in opts:
51 if opt in ("-h"):
52 show_syntax_and_exit(0)
53 elif opt in ("-i"):
54 infile = arg
55 elif opt in ("-o"):
56 outfile = arg
57
58def load_json(filename):
59 """
60 Load the JSON file, return the resulting dictionary
61 Arguments:
62 filename: the file to open
63 Returns:
64 Parsed file as a dictionary
65 """
66 import json
67
68 out = {}
69 try:
70 with open(filename, "r") as f:
71 out = json.load(f)
72 except FileNotFoundError:
73 exit_error(1, "Input file (%s) not found" % (filename))
74 except json.decoder.JSONDecodeError as error:
75 exit_error(1, "Malformed JSON file: %s" % str(error))
76 return out
77
78
79def process_data(filename, data):
80 """
81 Write the resulting text report, with one block for each package issue
82 Arguments:
83 filename: the file to write to
84 data: dictionary from parsing the JSON file
85 Returns:
86 None
87 """
88 if not "version" in data or data["version"] != "1":
89 exit_error(1, "Unrecognized format version number")
90 if not "package" in data:
91 exit_error(1, "Mandatory 'package' key not found")
92
93 lines = ""
94 total_issue_count = 0
95 for package in data["package"]:
96 package_info = ""
97 keys_in_package = {"name", "layer", "version", "issue"}
98 if keys_in_package - package.keys():
99 exit_error(
100 1,
101 "Missing a mandatory key in package: %s"
102 % (keys_in_package - package.keys()),
103 )
104
105 package_info += "LAYER: %s\n" % package["layer"]
106 package_info += "PACKAGE NAME: %s\n" % package["name"]
107 package_info += "PACKAGE VERSION: %s\n" % package["version"]
108
109 for issue in package["issue"]:
110 keys_in_issue = {"id", "status", "detail"}
111 if keys_in_issue - issue.keys():
112 print("Warning: Missing keys %s in 'issue' for the package '%s'"
113 % (keys_in_issue - issue.keys(), package["name"]))
114
115 lines += package_info
116 lines += "CVE: %s\n" % issue["id"]
117 lines += "CVE STATUS: %s\n" % issue["status"]
118 lines += "CVE DETAIL: %s\n" % issue["detail"]
119 if "description" in issue:
120 lines += "CVE DESCRIPTION: %s\n" % issue["description"]
121 if "summary" in issue:
122 lines += "CVE SUMMARY: %s\n" % issue["summary"]
123 if "scorev2" in issue:
124 lines += "CVSS v2 BASE SCORE: %s\n" % issue["scorev2"]
125 if "scorev3" in issue:
126 lines += "CVSS v3 BASE SCORE: %s\n" % issue["scorev3"]
127 if "scorev4" in issue:
128 lines += "CVSS v4 BASE SCORE: %s\n" % issue["scorev4"]
129 if "vector" in issue:
130 lines += "VECTOR: %s\n" % issue["vector"]
131 if "vectorString" in issue:
132 lines += "VECTORSTRING: %s\n" % issue["vectorString"]
133 lines += "MORE INFORMATION: https://nvd.nist.gov/vuln/detail/%s\n" % issue["id"]
134 lines += "\n"
135
136 with open(filename, "w") as f:
137 f.write(lines)
138
139def main(argv):
140 parse_args(argv)
141 data = load_json(infile)
142 process_data(outfile, data)
143
144
145if __name__ == "__main__":
146 main(sys.argv[1:])
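For a quick end-to-end check of the converter, the following sketch writes a minimal version-1 document (all values invented) and runs the script on it:

import json
import subprocess

sample = {
    "version": "1",
    "package": [{
        "name": "busybox", "layer": "meta", "version": "1.36.1",
        "issue": [{"id": "CVE-2022-0000", "status": "Patched",
                   "detail": "fixed-version"}],
    }],
}
with open("in.json", "w") as f:
    json.dump(sample, f)

# Assumes cve-json-to-text.py is in the current directory and executable.
subprocess.run(["./cve-json-to-text.py", "-i", "in.json", "-o", "out.txt"], check=True)
print(open("out.txt").read())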
diff --git a/scripts/devtool b/scripts/devtool
index af4811b922..39cebec0d8 100755
--- a/scripts/devtool
+++ b/scripts/devtool
@@ -7,19 +7,17 @@
7# SPDX-License-Identifier: GPL-2.0-only 7# SPDX-License-Identifier: GPL-2.0-only
8# 8#
9 9
10import dataclasses
10import sys 11import sys
11import os 12import os
12import argparse 13import argparse
13import glob 14import glob
14import re 15import re
15import configparser 16import configparser
16import subprocess
17import logging 17import logging
18 18
19basepath = '' 19# This can be removed once our minimum is Python 3.9: https://docs.python.org/3/whatsnew/3.9.html#type-hinting-generics-in-standard-collections
20workspace = {} 20from typing import List
21config = None
22context = None
23 21
24 22
25scripts_path = os.path.dirname(os.path.realpath(__file__)) 23scripts_path = os.path.dirname(os.path.realpath(__file__))
@@ -30,16 +28,16 @@ import scriptutils
30import argparse_oe 28import argparse_oe
31logger = scriptutils.logger_create('devtool') 29logger = scriptutils.logger_create('devtool')
32 30
33plugins = []
34 31
35 32class ConfigHandler:
36class ConfigHandler(object): 33 basepath = None
37 config_file = '' 34 config_file = ''
38 config_obj = None 35 config_obj = None
39 init_path = '' 36 init_path = ''
40 workspace_path = '' 37 workspace_path = ''
41 38
42 def __init__(self, filename): 39 def __init__(self, basepath, filename):
40 self.basepath = basepath
43 self.config_file = filename 41 self.config_file = filename
44 self.config_obj = configparser.ConfigParser() 42 self.config_obj = configparser.ConfigParser()
45 43
@@ -47,7 +45,7 @@ class ConfigHandler(object):
47 try: 45 try:
48 ret = self.config_obj.get(section, option) 46 ret = self.config_obj.get(section, option)
49 except (configparser.NoOptionError, configparser.NoSectionError): 47 except (configparser.NoOptionError, configparser.NoSectionError):
50 if default != None: 48 if default is not None:
51 ret = default 49 ret = default
52 else: 50 else:
53 raise 51 raise
@@ -59,14 +57,14 @@ class ConfigHandler(object):
59 57
60 if self.config_obj.has_option('General', 'init_path'): 58 if self.config_obj.has_option('General', 'init_path'):
61 pth = self.get('General', 'init_path') 59 pth = self.get('General', 'init_path')
62 self.init_path = os.path.join(basepath, pth) 60 self.init_path = os.path.join(self.basepath, pth)
63 if not os.path.exists(self.init_path): 61 if not os.path.exists(self.init_path):
64 logger.error('init_path %s specified in config file cannot be found' % pth) 62 logger.error('init_path %s specified in config file cannot be found' % pth)
65 return False 63 return False
66 else: 64 else:
67 self.config_obj.add_section('General') 65 self.config_obj.add_section('General')
68 66
69 self.workspace_path = self.get('General', 'workspace_path', os.path.join(basepath, 'workspace')) 67 self.workspace_path = self.get('General', 'workspace_path', os.path.join(self.basepath, 'workspace'))
70 return True 68 return True
71 69
72 70
@@ -81,29 +79,32 @@ class ConfigHandler(object):
81 self.config_obj.add_section(section) 79 self.config_obj.add_section(section)
82 self.config_obj.set(section, option, value) 80 self.config_obj.set(section, option, value)
83 81
82
83@dataclasses.dataclass
84class Context: 84class Context:
85 def __init__(self, **kwargs): 85 fixed_setup: bool
86 self.__dict__.update(kwargs) 86 config: ConfigHandler
87 pluginpaths: List[str]
87 88
88 89
89def read_workspace(): 90def read_workspace(basepath, context):
90 global workspace
91 workspace = {} 91 workspace = {}
92 if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')): 92 if not os.path.exists(os.path.join(context.config.workspace_path, 'conf', 'layer.conf')):
93 if context.fixed_setup: 93 if context.fixed_setup:
94 logger.error("workspace layer not set up") 94 logger.error("workspace layer not set up")
95 sys.exit(1) 95 sys.exit(1)
96 else: 96 else:
97 logger.info('Creating workspace layer in %s' % config.workspace_path) 97 logger.info('Creating workspace layer in %s' % context.config.workspace_path)
98 _create_workspace(config.workspace_path, config, basepath) 98 _create_workspace(context.config.workspace_path, basepath)
99 if not context.fixed_setup: 99 if not context.fixed_setup:
100 _enable_workspace_layer(config.workspace_path, config, basepath) 100 _enable_workspace_layer(context.config.workspace_path, context.config, basepath)
101 101
102 logger.debug('Reading workspace in %s' % config.workspace_path) 102 logger.debug('Reading workspace in %s' % context.config.workspace_path)
103 externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$') 103 externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$')
104 for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')): 104 for fn in glob.glob(os.path.join(context.config.workspace_path, 'appends', '*.bbappend')):
105 with open(fn, 'r') as f: 105 with open(fn, 'r') as f:
106 pnvalues = {} 106 pnvalues = {}
107 pn = None
107 for line in f: 108 for line in f:
108 res = externalsrc_re.match(line.rstrip()) 109 res = externalsrc_re.match(line.rstrip())
109 if res: 110 if res:
@@ -111,7 +112,7 @@ def read_workspace():
111 pn = res.group(2) or recipepn 112 pn = res.group(2) or recipepn
112 # Find the recipe file within the workspace, if any 113 # Find the recipe file within the workspace, if any
113 bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*') 114 bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*')
114 recipefile = glob.glob(os.path.join(config.workspace_path, 115 recipefile = glob.glob(os.path.join(context.config.workspace_path,
115 'recipes', 116 'recipes',
116 recipepn, 117 recipepn,
117 bbfile)) 118 bbfile))
@@ -123,27 +124,42 @@ def read_workspace():
123 elif line.startswith('# srctreebase: '): 124 elif line.startswith('# srctreebase: '):
124 pnvalues['srctreebase'] = line.split(':', 1)[1].strip() 125 pnvalues['srctreebase'] = line.split(':', 1)[1].strip()
125 if pnvalues: 126 if pnvalues:
127 if not pn:
128 raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. "
129 "Maybe still using old syntax?" % context.config.workspace_path)
126 if not pnvalues.get('srctreebase', None): 130 if not pnvalues.get('srctreebase', None):
127 pnvalues['srctreebase'] = pnvalues['srctree'] 131 pnvalues['srctreebase'] = pnvalues['srctree']
128 logger.debug('Found recipe %s' % pnvalues) 132 logger.debug('Found recipe %s' % pnvalues)
129 workspace[pn] = pnvalues 133 workspace[pn] = pnvalues
130 134
131def create_workspace(args, config, basepath, workspace): 135 return workspace
136
137def create_workspace(args, config, basepath, _workspace):
132 if args.layerpath: 138 if args.layerpath:
133 workspacedir = os.path.abspath(args.layerpath) 139 workspacedir = os.path.abspath(args.layerpath)
134 else: 140 else:
135 workspacedir = os.path.abspath(os.path.join(basepath, 'workspace')) 141 workspacedir = os.path.abspath(os.path.join(basepath, 'workspace'))
136 _create_workspace(workspacedir, config, basepath) 142 layerseries = None
143 if args.layerseries:
144 layerseries = args.layerseries
145 _create_workspace(workspacedir, basepath, layerseries)
137 if not args.create_only: 146 if not args.create_only:
138 _enable_workspace_layer(workspacedir, config, basepath) 147 _enable_workspace_layer(workspacedir, config, basepath)
139 148
140def _create_workspace(workspacedir, config, basepath): 149def _create_workspace(workspacedir, basepath, layerseries=None):
141 import bb 150 import bb.utils
142 151
143 confdir = os.path.join(workspacedir, 'conf') 152 confdir = os.path.join(workspacedir, 'conf')
144 if os.path.exists(os.path.join(confdir, 'layer.conf')): 153 if os.path.exists(os.path.join(confdir, 'layer.conf')):
145 logger.info('Specified workspace already set up, leaving as-is') 154 logger.info('Specified workspace already set up, leaving as-is')
146 else: 155 else:
156 if not layerseries:
157 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
158 try:
159 layerseries = tinfoil.config_data.getVar('LAYERSERIES_CORENAMES')
160 finally:
161 tinfoil.shutdown()
162
147 # Add a config file 163 # Add a config file
148 bb.utils.mkdirhier(confdir) 164 bb.utils.mkdirhier(confdir)
149 with open(os.path.join(confdir, 'layer.conf'), 'w') as f: 165 with open(os.path.join(confdir, 'layer.conf'), 'w') as f:
@@ -155,7 +171,7 @@ def _create_workspace(workspacedir, config, basepath):
155 f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n') 171 f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n')
156 f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n') 172 f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n')
157 f.write('BBFILE_PRIORITY_workspacelayer = "99"\n') 173 f.write('BBFILE_PRIORITY_workspacelayer = "99"\n')
158 f.write('LAYERSERIES_COMPAT_workspacelayer = "${LAYERSERIES_COMPAT_core}"\n') 174 f.write('LAYERSERIES_COMPAT_workspacelayer = "%s"\n' % layerseries)
159 # Add a README file 175 # Add a README file
160 with open(os.path.join(workspacedir, 'README'), 'w') as f: 176 with open(os.path.join(workspacedir, 'README'), 'w') as f:
161 f.write('This layer was created by the OpenEmbedded devtool utility in order to\n') 177 f.write('This layer was created by the OpenEmbedded devtool utility in order to\n')
@@ -176,7 +192,7 @@ def _create_workspace(workspacedir, config, basepath):
176 192
177def _enable_workspace_layer(workspacedir, config, basepath): 193def _enable_workspace_layer(workspacedir, config, basepath):
178 """Ensure the workspace layer is in bblayers.conf""" 194 """Ensure the workspace layer is in bblayers.conf"""
179 import bb 195 import bb.utils
180 bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf') 196 bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf')
181 if not os.path.exists(bblayers_conf): 197 if not os.path.exists(bblayers_conf):
182 logger.error('Unable to find bblayers.conf') 198 logger.error('Unable to find bblayers.conf')
@@ -195,15 +211,9 @@ def _enable_workspace_layer(workspacedir, config, basepath):
195 211
196 212
197def main(): 213def main():
198 global basepath
199 global config
200 global context
201
202 if sys.getfilesystemencoding() != "utf-8": 214 if sys.getfilesystemencoding() != "utf-8":
203 sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") 215 sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
204 216
205 context = Context(fixed_setup=False)
206
207 # Default basepath 217 # Default basepath
208 basepath = os.path.dirname(os.path.abspath(__file__)) 218 basepath = os.path.dirname(os.path.abspath(__file__))
209 219
@@ -228,21 +238,23 @@ def main():
228 elif global_args.quiet: 238 elif global_args.quiet:
229 logger.setLevel(logging.ERROR) 239 logger.setLevel(logging.ERROR)
230 240
241 is_fixed_setup = False
242
231 if global_args.basepath: 243 if global_args.basepath:
232 # Override 244 # Override
233 basepath = global_args.basepath 245 basepath = global_args.basepath
234 if os.path.exists(os.path.join(basepath, '.devtoolbase')): 246 if os.path.exists(os.path.join(basepath, '.devtoolbase')):
235 context.fixed_setup = True 247 is_fixed_setup = True
236 else: 248 else:
237 pth = basepath 249 pth = basepath
238 while pth != '' and pth != os.sep: 250 while pth != '' and pth != os.sep:
239 if os.path.exists(os.path.join(pth, '.devtoolbase')): 251 if os.path.exists(os.path.join(pth, '.devtoolbase')):
240 context.fixed_setup = True 252 is_fixed_setup = True
241 basepath = pth 253 basepath = pth
242 break 254 break
243 pth = os.path.dirname(pth) 255 pth = os.path.dirname(pth)
244 256
245 if not context.fixed_setup: 257 if not is_fixed_setup:
246 basepath = os.environ.get('BUILDDIR') 258 basepath = os.environ.get('BUILDDIR')
247 if not basepath: 259 if not basepath:
248 logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)") 260 logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
@@ -250,10 +262,9 @@ def main():
250 262
251 logger.debug('Using basepath %s' % basepath) 263 logger.debug('Using basepath %s' % basepath)
252 264
253 config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf')) 265 config = ConfigHandler(basepath, os.path.join(basepath, 'conf', 'devtool.conf'))
254 if not config.read(): 266 if not config.read():
255 return -1 267 return -1
256 context.config = config
257 268
258 bitbake_subdir = config.get('General', 'bitbake_subdir', '') 269 bitbake_subdir = config.get('General', 'bitbake_subdir', '')
259 if bitbake_subdir: 270 if bitbake_subdir:
@@ -275,6 +286,7 @@ def main():
275 scriptutils.logger_setup_color(logger, global_args.color) 286 scriptutils.logger_setup_color(logger, global_args.color)
276 287
277 if global_args.bbpath is None: 288 if global_args.bbpath is None:
289 import bb
278 try: 290 try:
279 tinfoil = setup_tinfoil(config_only=True, basepath=basepath) 291 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
280 try: 292 try:
@@ -285,8 +297,12 @@ def main():
285 return 2 297 return 2
286 298
287 # Search BBPATH first to allow layers to override plugins in scripts_path 299 # Search BBPATH first to allow layers to override plugins in scripts_path
288 for path in global_args.bbpath.split(':') + [scripts_path]: 300 pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]]
289 pluginpath = os.path.join(path, 'lib', 'devtool') 301
302 context = Context(fixed_setup=is_fixed_setup, config=config, pluginpaths=pluginpaths)
303
304 plugins = []
305 for pluginpath in pluginpaths:
290 scriptutils.load_plugins(logger, plugins, pluginpath) 306 scriptutils.load_plugins(logger, plugins, pluginpath)
291 307
292 subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>') 308 subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
@@ -305,6 +321,7 @@ def main():
305 description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.', 321 description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.',
306 group='advanced') 322 group='advanced')
307 parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created') 323 parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created')
324 parser_create_workspace.add_argument('--layerseries', help='Layer series the workspace should be set to be compatible with')
308 parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration') 325 parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration')
309 parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True) 326 parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True)
310 327
@@ -314,10 +331,10 @@ def main():
314 331
315 args = parser.parse_args(unparsed_args, namespace=global_args) 332 args = parser.parse_args(unparsed_args, namespace=global_args)
316 333
317 if not getattr(args, 'no_workspace', False):
318 read_workspace()
319
320 try: 334 try:
335 workspace = {}
336 if not getattr(args, 'no_workspace', False):
337 workspace = read_workspace(basepath, context)
321 ret = args.func(args, config, basepath, workspace) 338 ret = args.func(args, config, basepath, workspace)
322 except DevtoolError as err: 339 except DevtoolError as err:
323 if str(err): 340 if str(err):
@@ -325,6 +342,7 @@ def main():
325 ret = err.exitcode 342 ret = err.exitcode
326 except argparse_oe.ArgumentUsageError as ae: 343 except argparse_oe.ArgumentUsageError as ae:
327 parser.error_subcommand(ae.message, ae.subcommand) 344 parser.error_subcommand(ae.message, ae.subcommand)
345 ret = 2
328 346
329 return ret 347 return ret
330 348
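The devtool refactor above threads state through an explicit Context instead of module globals; constructing one standalone looks roughly like this (the config value is stubbed, paths invented):

import dataclasses
from typing import List

@dataclasses.dataclass
class Context:
    fixed_setup: bool
    config: object          # a ConfigHandler instance in devtool itself
    pluginpaths: List[str]

ctx = Context(fixed_setup=False, config=None,
              pluginpaths=["/path/to/poky/scripts/lib/devtool"])
print(ctx.fixed_setup, ctx.pluginpaths)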
diff --git a/scripts/esdk-tools/devtool b/scripts/esdk-tools/devtool
new file mode 120000
index 0000000000..176a01ca68
--- /dev/null
+++ b/scripts/esdk-tools/devtool
@@ -0,0 +1 @@
../devtool \ No newline at end of file
diff --git a/scripts/esdk-tools/oe-find-native-sysroot b/scripts/esdk-tools/oe-find-native-sysroot
new file mode 120000
index 0000000000..d3493f3310
--- /dev/null
+++ b/scripts/esdk-tools/oe-find-native-sysroot
@@ -0,0 +1 @@
../oe-find-native-sysroot \ No newline at end of file
diff --git a/scripts/esdk-tools/recipetool b/scripts/esdk-tools/recipetool
new file mode 120000
index 0000000000..60a95dd936
--- /dev/null
+++ b/scripts/esdk-tools/recipetool
@@ -0,0 +1 @@
../recipetool \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu b/scripts/esdk-tools/runqemu
new file mode 120000
index 0000000000..ae7e7ad7c2
--- /dev/null
+++ b/scripts/esdk-tools/runqemu
@@ -0,0 +1 @@
../runqemu \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-addptable2image b/scripts/esdk-tools/runqemu-addptable2image
new file mode 120000
index 0000000000..afcd00e79d
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-addptable2image
@@ -0,0 +1 @@
../runqemu-addptable2image \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-export-rootfs b/scripts/esdk-tools/runqemu-export-rootfs
new file mode 120000
index 0000000000..a26fcf6110
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-export-rootfs
@@ -0,0 +1 @@
../runqemu-export-rootfs \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-extract-sdk b/scripts/esdk-tools/runqemu-extract-sdk
new file mode 120000
index 0000000000..cc858aaad5
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-extract-sdk
@@ -0,0 +1 @@
../runqemu-extract-sdk \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-gen-tapdevs b/scripts/esdk-tools/runqemu-gen-tapdevs
new file mode 120000
index 0000000000..dbdf79134c
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-gen-tapdevs
@@ -0,0 +1 @@
../runqemu-gen-tapdevs \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-ifdown b/scripts/esdk-tools/runqemu-ifdown
new file mode 120000
index 0000000000..0097693ca3
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-ifdown
@@ -0,0 +1 @@
../runqemu-ifdown \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-ifup b/scripts/esdk-tools/runqemu-ifup
new file mode 120000
index 0000000000..41026d2c0a
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-ifup
@@ -0,0 +1 @@
../runqemu-ifup \ No newline at end of file
diff --git a/scripts/esdk-tools/wic b/scripts/esdk-tools/wic
new file mode 120000
index 0000000000..a9d908aa25
--- /dev/null
+++ b/scripts/esdk-tools/wic
@@ -0,0 +1 @@
../wic \ No newline at end of file
diff --git a/scripts/gen-lockedsig-cache b/scripts/gen-lockedsig-cache
index cc674f9c1b..023015ec41 100755
--- a/scripts/gen-lockedsig-cache
+++ b/scripts/gen-lockedsig-cache
@@ -1,5 +1,8 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3#
4# Copyright OpenEmbedded Contributors
5#
3# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
4# 7#
5 8
diff --git a/scripts/gen-site-config b/scripts/gen-site-config
deleted file mode 100755
index 727b809c0f..0000000000
--- a/scripts/gen-site-config
+++ /dev/null
@@ -1,43 +0,0 @@
1#! /bin/sh
2# Copyright (c) 2005-2008 Wind River Systems, Inc.
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7cat << EOF
8AC_PREREQ(2.57)
9AC_INIT([site_wide],[1.0.0])
10
11EOF
12
13# Disable as endian is set in the default config
14#echo AC_C_BIGENDIAN
15#echo
16
17if [ -e $1/types ] ; then
18 while read type ; do
19 echo "AC_CHECK_SIZEOF([$type])"
20 done < $1/types
21
22 echo
23fi
24
25if [ -e $1/funcs ]; then
26 while read func ; do
27 echo "AC_CHECK_FUNCS([$func])"
28 done < $1/funcs
29
30 echo
31fi
32
33if [ -e $1/headers ]; then
34 while read header ; do
35 echo "AC_CHECK_HEADERS([$header])"
36 done < $1/headers
37
38 echo
39fi
40
41cat << EOF
42AC_OUTPUT
43EOF
diff --git a/scripts/git b/scripts/git
new file mode 100755
index 0000000000..689adbf9dd
--- /dev/null
+++ b/scripts/git
@@ -0,0 +1,30 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7# Wrapper around 'git' that doesn't think we are root
8
9import os
10import shutil
11import sys
12
13os.environ['PSEUDO_UNLOAD'] = '1'
14
15# calculate path to the real 'git'
16path = os.environ['PATH']
17# we need to remove our path but also any other copy of this script which
18# may be present, e.g. eSDK.
19replacements = [os.path.dirname(sys.argv[0])]
20for p in path.split(":"):
21 if p.endswith("/scripts"):
22 replacements.append(p)
23for r in replacements:
24 path = path.replace(r, '/ignoreme')
25real_git = shutil.which('git', path=path)
26
27if len(sys.argv) == 1:
28 os.execl(real_git, 'git')
29
30os.execv(real_git, sys.argv)
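The wrapper's PATH scrubbing generalizes to any tool that must not resolve back to itself: blank out every directory that could contain the wrapper, then let shutil.which fall through to the real binary. A minimal sketch of the technique (directory names invented):

import shutil

def find_real_tool(tool, path, wrapper_dirs):
    # Replace wrapper directories with a path that cannot exist so
    # shutil.which skips them and finds the real executable instead.
    for d in wrapper_dirs:
        path = path.replace(d, "/ignoreme")
    return shutil.which(tool, path=path)

# Hypothetical layout: the wrapper lives in /opt/poky/scripts.
print(find_real_tool("git", "/opt/poky/scripts:/usr/bin", ["/opt/poky/scripts"]))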
diff --git a/scripts/install-buildtools b/scripts/install-buildtools
index 8554a5db67..aa23942858 100755
--- a/scripts/install-buildtools
+++ b/scripts/install-buildtools
@@ -56,10 +56,10 @@ PROGNAME = 'install-buildtools'
56logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) 56logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
57 57
58DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') 58DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
59DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto' 59DEFAULT_BASE_URL = 'https://downloads.yoctoproject.org/releases/yocto'
60DEFAULT_RELEASE = 'yocto-3.2_M3' 60DEFAULT_RELEASE = 'yocto-5.2.1'
61DEFAULT_INSTALLER_VERSION = '3.1+snapshot' 61DEFAULT_INSTALLER_VERSION = '5.2.1'
62DEFAULT_BUILDDATE = '20200923' 62DEFAULT_BUILDDATE = '202110XX'
63 63
64# Python version sanity check 64# Python version sanity check
65if not (sys.version_info.major == 3 and sys.version_info.minor >= 4): 65if not (sys.version_info.major == 3 and sys.version_info.minor >= 4):
@@ -102,6 +102,16 @@ def sha256_file(filename):
102 import hashlib 102 import hashlib
103 return _hasher(hashlib.sha256(), filename) 103 return _hasher(hashlib.sha256(), filename)
104 104
105def remove_quotes(var):
106 """
107 If a variable starts and ends with double quotes, remove them.
108 Assumption: if a variable starts with double quotes, it must also
109 end with them.
110 """
111 if var[0] == '"':
112 var = var[1:-1]
113 return var
114
105 115
106def main(): 116def main():
107 global DEFAULT_INSTALL_DIR 117 global DEFAULT_INSTALL_DIR
@@ -117,7 +127,8 @@ def main():
117 127
118 parser = argparse.ArgumentParser( 128 parser = argparse.ArgumentParser(
119 description="Buildtools installation helper", 129 description="Buildtools installation helper",
120 add_help=False) 130 add_help=False,
131 formatter_class=argparse.RawTextHelpFormatter)
121 parser.add_argument('-u', '--url', 132 parser.add_argument('-u', '--url',
122 help='URL from where to fetch buildtools SDK installer, not ' 133 help='URL from where to fetch buildtools SDK installer, not '
123 'including filename (optional)\n' 134 'including filename (optional)\n'
@@ -131,6 +142,9 @@ def main():
131 default=DEFAULT_INSTALL_DIR, 142 default=DEFAULT_INSTALL_DIR,
132 help='directory where buildtools SDK will be installed (optional)', 143 help='directory where buildtools SDK will be installed (optional)',
133 action='store') 144 action='store')
145 parser.add_argument('--downloads-directory',
146 help='use this directory for tarball/checksum downloads and do not erase them (default is a temporary directory which is deleted after unpacking and installing the buildtools)',
147 action='store')
134 parser.add_argument('-r', '--release', 148 parser.add_argument('-r', '--release',
135 default=DEFAULT_RELEASE, 149 default=DEFAULT_RELEASE,
136 help='Yocto Project release string for SDK which will be ' 150 help='Yocto Project release string for SDK which will be '
@@ -154,6 +168,8 @@ def main():
154 group.add_argument('--without-extended-buildtools', action='store_false', 168 group.add_argument('--without-extended-buildtools', action='store_false',
155 dest='with_extended_buildtools', 169 dest='with_extended_buildtools',
156 help='disable extended buildtools (traditional buildtools tarball)') 170 help='disable extended buildtools (traditional buildtools tarball)')
171 group.add_argument('--make-only', action='store_true',
172 help='only install make tarball')
157 group = parser.add_mutually_exclusive_group() 173 group = parser.add_mutually_exclusive_group()
158 group.add_argument('-c', '--check', help='enable checksum validation', 174 group.add_argument('-c', '--check', help='enable checksum validation',
159 default=True, action='store_true') 175 default=True, action='store_true')
@@ -170,6 +186,9 @@ def main():
170 186
171 args = parser.parse_args() 187 args = parser.parse_args()
172 188
189 if args.make_only:
190 args.with_extended_buildtools = False
191
173 if args.debug: 192 if args.debug:
174 logger.setLevel(logging.DEBUG) 193 logger.setLevel(logging.DEBUG)
175 elif args.quiet: 194 elif args.quiet:
@@ -197,7 +216,10 @@ def main():
197 if not args.build_date: 216 if not args.build_date:
198 logger.error("Milestone installers require --build-date") 217 logger.error("Milestone installers require --build-date")
199 else: 218 else:
200 if args.with_extended_buildtools: 219 if args.make_only:
220 filename = "%s-buildtools-make-nativesdk-standalone-%s-%s.sh" % (
221 arch, args.installer_version, args.build_date)
222 elif args.with_extended_buildtools:
201 filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % ( 223 filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
202 arch, args.installer_version, args.build_date) 224 arch, args.installer_version, args.build_date)
203 else: 225 else:
@@ -207,6 +229,8 @@ def main():
207 buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename) 229 buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
208 # regular release SDK 230 # regular release SDK
209 else: 231 else:
232 if args.make_only:
233 filename = "%s-buildtools-make-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
210 if args.with_extended_buildtools: 234 elif args.with_extended_buildtools:
211 filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version) 235 filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
212 else: 236 else:
@@ -214,11 +238,14 @@ def main():
214 safe_filename = quote(filename) 238 safe_filename = quote(filename)
215 buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename) 239 buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename)
216 240
217 tmpsdk_dir = tempfile.mkdtemp() 241 sdk_dir = args.downloads_directory or tempfile.mkdtemp()
242 os.makedirs(sdk_dir, exist_ok=True)
218 try: 243 try:
219 # Fetch installer 244 # Fetch installer
220 logger.info("Fetching buildtools installer") 245 logger.info("Fetching buildtools installer")
221 tmpbuildtools = os.path.join(tmpsdk_dir, filename) 246 tmpbuildtools = os.path.join(sdk_dir, filename)
247 with open(os.path.join(sdk_dir, 'buildtools_url'), 'w') as f:
248 f.write(buildtools_url)
222 ret = subprocess.call("wget -q -O %s %s" % 249 ret = subprocess.call("wget -q -O %s %s" %
223 (tmpbuildtools, buildtools_url), shell=True) 250 (tmpbuildtools, buildtools_url), shell=True)
224 if ret != 0: 251 if ret != 0:
@@ -228,19 +255,17 @@ def main():
228 # Verify checksum 255 # Verify checksum
229 if args.check: 256 if args.check:
230 logger.info("Fetching buildtools installer checksum") 257 logger.info("Fetching buildtools installer checksum")
231 checksum_type = "" 258 checksum_type = "sha256sum"
232 for checksum_type in ["md5sum", "sha256sum"]: 259 checksum_url = "{}.{}".format(buildtools_url, checksum_type)
233 check_url = "{}.{}".format(buildtools_url, checksum_type) 260 checksum_filename = "{}.{}".format(filename, checksum_type)
234 checksum_filename = "{}.{}".format(filename, checksum_type) 261 tmpbuildtools_checksum = os.path.join(sdk_dir, checksum_filename)
235 tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename) 262 with open(os.path.join(sdk_dir, 'checksum_url'), 'w') as f:
236 ret = subprocess.call("wget -q -O %s %s" % 263 f.write(checksum_url)
237 (tmpbuildtools_checksum, check_url), shell=True) 264 ret = subprocess.call("wget -q -O %s %s" %
238 if ret == 0: 265 (tmpbuildtools_checksum, checksum_url), shell=True)
239 break 266 if ret != 0:
240 else: 267 logger.error("Could not download file from %s" % checksum_url)
241 if ret != 0: 268 return ret
242 logger.error("Could not download file from %s" % check_url)
243 return ret
244 regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$") 269 regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
245 with open(tmpbuildtools_checksum, 'rb') as f: 270 with open(tmpbuildtools_checksum, 'rb') as f:
246 original = f.read() 271 original = f.read()
@@ -253,10 +278,7 @@ def main():
253 logger.error("Filename does not match name in checksum") 278 logger.error("Filename does not match name in checksum")
254 return 1 279 return 1
255 checksum = m.group('checksum') 280 checksum = m.group('checksum')
256 if checksum_type == "md5sum": 281 checksum_value = sha256_file(tmpbuildtools)
257 checksum_value = md5_file(tmpbuildtools)
258 else:
259 checksum_value = sha256_file(tmpbuildtools)
260 if checksum == checksum_value: 282 if checksum == checksum_value:
261 logger.info("Checksum success") 283 logger.info("Checksum success")
262 else: 284 else:
@@ -270,7 +292,7 @@ def main():
270 os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC) 292 os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC)
271 logger.debug(os.stat(tmpbuildtools)) 293 logger.debug(os.stat(tmpbuildtools))
272 if args.directory: 294 if args.directory:
273 install_dir = args.directory 295 install_dir = os.path.abspath(args.directory)
274 ret = subprocess.call("%s -d %s -y" % 296 ret = subprocess.call("%s -d %s -y" %
275 (tmpbuildtools, install_dir), shell=True) 297 (tmpbuildtools, install_dir), shell=True)
276 else: 298 else:
@@ -291,7 +313,7 @@ def main():
291 if match: 313 if match:
292 env_var = match.group('env_var') 314 env_var = match.group('env_var')
293 logger.debug("env_var: %s" % env_var) 315 logger.debug("env_var: %s" % env_var)
294 env_val = match.group('env_val') 316 env_val = remove_quotes(match.group('env_val'))
295 logger.debug("env_val: %s" % env_val) 317 logger.debug("env_val: %s" % env_val)
296 os.environ[env_var] = env_val 318 os.environ[env_var] = env_val
297 319
@@ -303,7 +325,9 @@ def main():
303 if args.with_extended_buildtools and not m: 325 if args.with_extended_buildtools and not m:
304 logger.info("Ignoring --with-extended-buildtools as filename " 326 logger.info("Ignoring --with-extended-buildtools as filename "
305 "does not contain 'extended'") 327 "does not contain 'extended'")
306 if args.with_extended_buildtools and m: 328 if args.make_only:
329 tool = 'make'
330 elif args.with_extended_buildtools and m:
307 tool = 'gcc' 331 tool = 'gcc'
308 else: 332 else:
309 tool = 'tar' 333 tool = 'tar'
@@ -331,7 +355,8 @@ def main():
331 355
332 finally: 356 finally:
333 # cleanup tmp directory 357 # cleanup tmp directory
334 shutil.rmtree(tmpsdk_dir) 358 if not args.downloads_directory:
359 shutil.rmtree(sdk_dir)
335 360
336 361
337if __name__ == '__main__': 362if __name__ == '__main__':
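With md5 support dropped above, verification reduces to parsing the .sha256sum sidecar and comparing digests. A sketch of that pattern (file names hypothetical, regex as in the script):

import hashlib
import re

def verify_sha256(tarball, sidecar):
    # Sidecar format: "<hex digest>  [path/]filename"
    regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
    with open(sidecar) as f:
        m = regex.match(f.read().strip())
    h = hashlib.sha256()
    with open(tarball, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)
    return bool(m) and m.group("checksum") == h.hexdigest()

print(verify_sha256("buildtools.sh", "buildtools.sh.sha256sum"))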
diff --git a/scripts/lib/argparse_oe.py b/scripts/lib/argparse_oe.py
index 94a4ac5011..176b732bbc 100644
--- a/scripts/lib/argparse_oe.py
+++ b/scripts/lib/argparse_oe.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html
index 65f1a227ad..86435273cf 100644
--- a/scripts/lib/build_perf/html/measurement_chart.html
+++ b/scripts/lib/build_perf/html/measurement_chart.html
@@ -1,50 +1,168 @@
1<script type="text/javascript"> 1<script type="module">
2 chartsDrawing += 1; 2 // Get raw data
3 google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }}); 3 const rawData = [
4 function drawChart_{{ chart_elem_id }}() { 4 {% for sample in measurement.samples %}
5 var data = new google.visualization.DataTable(); 5 [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'],
6 6 {% endfor %}
7 // Chart options 7 ];
8 var options = { 8
9 theme : 'material', 9 const convertToMinute = (time) => {
10 legend: 'none', 10 return time[0]*60 + time[1] + time[2]/60 + time[3]/3600;
11 hAxis: { format: '', title: 'Commit number', 11 }
12 minValue: {{ chart_opts.haxis.min }}, 12
13 maxValue: {{ chart_opts.haxis.max }} }, 13 // Update value format to either minutes or leave as size value
14 {% if measurement.type == 'time' %} 14 const updateValue = (value) => {
15 vAxis: { format: 'h:mm:ss' }, 15 // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds]
16 {% else %} 16 return Array.isArray(value) ? convertToMinute(value) : value
17 vAxis: { format: '' }, 17 }
18 {% endif %} 18
19 pointSize: 5, 19 // Convert raw data to the format: [time, value]
20 chartArea: { left: 80, right: 15 }, 20 const data = rawData.map(([commit, value, time]) => {
21 }; 21 return [
22 22 // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000.
23 // Define data columns 23 new Date(time * 1000).getTime(),
24 data.addColumn('number', 'Commit'); 24 // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds]
25 data.addColumn('{{ measurement.value_type.gv_data_type }}', 25 updateValue(value)
26 '{{ measurement.value_type.quantity }}'); 26 ]
27 // Add data rows 27 });
28 data.addRows([ 28
29 {% for sample in measurement.samples %} 29 const commitCountList = rawData.map(([commit, value, time]) => {
30 [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}], 30 return commit
31 {% endfor %} 31 });
32 ]); 32
33 33 const commitCountData = rawData.map(([commit, value, time]) => {
34 // Finally, draw the chart 34 return updateValue(value)
35 chart_div = document.getElementById('{{ chart_elem_id }}'); 35 });
36 var chart = new google.visualization.LineChart(chart_div); 36
37 google.visualization.events.addListener(chart, 'ready', function () { 37 // Set chart options
38 //chart_div = document.getElementById('{{ chart_elem_id }}'); 38 const option_start_time = {
39 //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">'; 39 tooltip: {
40 png_div = document.getElementById('{{ chart_elem_id }}_png'); 40 trigger: 'axis',
41 png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>'; 41 enterable: true,
42 console.log("CHART READY: {{ chart_elem_id }}"); 42 position: function (point, params, dom, rect, size) {
43 chartsDrawing -= 1; 43 return [point[0], '0%'];
44 if (chartsDrawing == 0) 44 },
45 console.log("ALL CHARTS READY"); 45 formatter: function (param) {
46 const value = param[0].value[1]
47 const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
48 const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
49
50 // Add commit hash to the tooltip as a link
51 const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
52 if ('{{ measurement.value_type.quantity }}' == 'time') {
53 const hours = Math.floor(value/60)
54 const minutes = Math.floor(value % 60)
55 const seconds = Math.floor((value * 60) % 60)
56 return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
57 }
58 return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
59 ;}
60 },
61 xAxis: {
62 type: 'time',
63 },
64 yAxis: {
65 name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
66 type: 'value',
67 min: function(value) {
68 return Math.round(value.min - 0.5);
69 },
70 max: function(value) {
71 return Math.round(value.max + 0.5);
72 }
73 },
74 dataZoom: [
75 {
76 type: 'slider',
77 xAxisIndex: 0,
78 filterMode: 'none'
79 },
80 ],
81 series: [
82 {
83 name: '{{ measurement.value_type.quantity }}',
84 type: 'line',
85 symbol: 'none',
86 data: data
87 }
88 ]
89 };
90
91 const option_commit_count = {
92 tooltip: {
93 trigger: 'axis',
94 enterable: true,
95 position: function (point, params, dom, rect, size) {
96 return [point[0], '0%'];
97 },
98 formatter: function (param) {
99 const value = param[0].value
100 const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value)
101 const formattedDate = new Date(sample[0][2] * 1000).toString().replace(/GMT[+-]\d{4}/, '').replace(/\(.*\)/, '(CEST)');
102 // Add commit hash to the tooltip as a link
103 const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}`
104 if ('{{ measurement.value_type.quantity }}' == 'time') {
105 const hours = Math.floor(value/60)
106 const minutes = Math.floor(value % 60)
107 const seconds = Math.floor((value * 60) % 60)
108 return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
109 }
110 return `<strong>Size:</strong> ${value.toFixed(2)} MB, <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>, <br/> <strong>Start time:</strong> ${formattedDate}`
111 ;}
112 },
113 xAxis: {
114 name: 'Commit count',
115 type: 'category',
116 data: commitCountList
117 },
118 yAxis: {
119 name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB',
120 type: 'value',
121 min: function(value) {
122 return Math.round(value.min - 0.5);
123 },
124 max: function(value) {
125 return Math.round(value.max + 0.5);
126 }
127 },
128 dataZoom: [
129 {
130 type: 'slider',
131 xAxisIndex: 0,
132 filterMode: 'none'
133 },
134 ],
135 series: [
136 {
137 name: '{{ measurement.value_type.quantity }}',
138 type: 'line',
139 symbol: 'none',
140 data: commitCountData
141 }
142 ]
143 };
144
145 // Draw chart
146 const draw_chart = (chart_id, option) => {
147 let chart_name
148 const chart_div = document.getElementById(chart_id);
149 // Set dark mode
150 if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
151 chart_name= echarts.init(chart_div, 'dark', {
152 height: 320
153 });
154 } else {
155 chart_name= echarts.init(chart_div, null, {
156 height: 320
157 });
158 }
159 // Change chart size with browser resize
160 window.addEventListener('resize', function() {
161 chart_name.resize();
46 }); 162 });
47 chart.draw(data, options); 163 return chart_name.setOption(option);
48} 164 }
49</script>
50 165
166 draw_chart('{{ chart_elem_start_time_id }}', option_start_time)
167 draw_chart('{{ chart_elem_commit_count_id }}', option_commit_count)
168</script>
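The template above plots minutes, so duration samples are flattened before charting. The same conversion written out in Python terms (field order as the template assumes; the fourth field is divided by 3600, reproduced as-is):

def to_minutes(duration):
    # Mirrors convertToMinute() above for a [hours, minutes, seconds, ...] array.
    h, m, s, frac = duration
    return h * 60 + m + s / 60 + frac / 3600

print(to_minutes([1, 12, 30, 0]))  # -> 72.5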
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html
index d1ba6f2578..28cd80e738 100644
--- a/scripts/lib/build_perf/html/report.html
+++ b/scripts/lib/build_perf/html/report.html
@@ -3,17 +3,14 @@
3<head> 3<head>
4{# Scripts, for visualization#} 4{# Scripts, for visualization#}
5<!--START-OF-SCRIPTS--> 5<!--START-OF-SCRIPTS-->
6<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script> 6<script src=" https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js "></script>
7<script type="text/javascript">
8google.charts.load('current', {'packages':['corechart']});
9var chartsDrawing = 0;
10</script>
11 7
12{# Render measurement result charts #} 8{# Render measurement result charts #}
13{% for test in test_data %} 9{% for test in test_data %}
14 {% if test.status == 'SUCCESS' %} 10 {% if test.status == 'SUCCESS' %}
15 {% for measurement in test.measurements %} 11 {% for measurement in test.measurements %}
16 {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %} 12 {% set chart_elem_start_time_id = test.name + '_' + measurement.name + '_chart_start_time' %}
13 {% set chart_elem_commit_count_id = test.name + '_' + measurement.name + '_chart_commit_count' %}
17 {% include 'measurement_chart.html' %} 14 {% include 'measurement_chart.html' %}
18 {% endfor %} 15 {% endfor %}
19 {% endif %} 16 {% endif %}
@@ -23,28 +20,29 @@ var chartsDrawing = 0;
23 20
24{# Styles #} 21{# Styles #}
25<style> 22<style>
23:root {
24 --text: #000;
25 --bg: #fff;
26 --h2heading: #707070;
27 --link: #0000EE;
28 --trtopborder: #9ca3af;
29 --trborder: #e5e7eb;
30 --chartborder: #f0f0f0;
31 }
26.meta-table { 32.meta-table {
27 font-size: 14px; 33 font-size: 14px;
28 text-align: left; 34 text-align: left;
29 border-collapse: collapse; 35 border-collapse: collapse;
30} 36}
31.meta-table tr:nth-child(even){background-color: #f2f2f2}
32meta-table th, .meta-table td {
33 padding: 4px;
34}
35.summary { 37.summary {
36 margin: 0;
37 font-size: 14px; 38 font-size: 14px;
38 text-align: left; 39 text-align: left;
39 border-collapse: collapse; 40 border-collapse: collapse;
40} 41}
41summary th, .meta-table td {
42 padding: 4px;
43}
44.measurement { 42.measurement {
45 padding: 8px 0px 8px 8px; 43 padding: 8px 0px 8px 8px;
46 border: 2px solid #f0f0f0; 44 border: 2px solid var(--chartborder);
47 margin-bottom: 10px; 45 margin: 1.5rem 0;
48} 46}
49.details { 47.details {
50 margin: 0; 48 margin: 0;
@@ -64,18 +62,97 @@ summary th, .meta-table td {
64 background-color: #f0f0f0; 62 background-color: #f0f0f0;
65 margin-left: 10px; 63 margin-left: 10px;
66} 64}
67hr { 65.card-container {
68 color: #f0f0f0; 66 border-bottom-width: 1px;
67 padding: 1.25rem 3rem;
68 box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1);
69 border-radius: 0.25rem;
70}
71body {
72 font-family: 'Helvetica', sans-serif;
73 margin: 3rem 8rem;
74 background-color: var(--bg);
75 color: var(--text);
76}
77h1 {
78 text-align: center;
69} 79}
70h2 { 80h2 {
71 font-size: 20px; 81 font-size: 1.5rem;
72 margin-bottom: 0px; 82 margin-bottom: 0px;
73 color: #707070; 83 color: var(--h2heading);
84 padding-top: 1.5rem;
74} 85}
75h3 { 86h3 {
76 font-size: 16px; 87 font-size: 1.3rem;
77 margin: 0px; 88 margin: 0px;
78 color: #707070; 89 color: var(--h2heading);
90 padding: 1.5rem 0;
91}
92h4 {
93 font-size: 14px;
94 font-weight: lighter;
95 line-height: 1.2rem;
96 margin: auto;
97 padding-top: 1rem;
98}
99table {
100 margin-top: 1.5rem;
101 line-height: 2rem;
102}
103tr {
104 border-bottom: 1px solid var(--trborder);
105}
106tr:first-child {
107 border-bottom: 1px solid var(--trtopborder);
108}
109tr:last-child {
110 border-bottom: none;
111}
112a {
113 text-decoration: none;
114 font-weight: bold;
115 color: var(--link);
116}
117a:hover {
118 color: #8080ff;
119}
120button {
121 background-color: #F3F4F6;
122 border: none;
123 outline: none;
124 cursor: pointer;
125 padding: 10px 12px;
126 transition: 0.3s;
127 border-radius: 8px;
128 color: #3A4353;
129}
130button:hover {
131 background-color: #d6d9e0;
132}
133.tab button.active {
134 background-color: #d6d9e0;
135}
136@media (prefers-color-scheme: dark) {
137 :root {
138 --text: #e9e8fa;
139 --bg: #0F0C28;
140 --h2heading: #B8B7CB;
141 --link: #87cefa;
142 --trtopborder: #394150;
143 --trborder: #212936;
144 --chartborder: #b1b0bf;
145 }
146 button {
147 background-color: #28303E;
148 color: #fff;
149 }
150 button:hover {
151 background-color: #545a69;
152 }
153 .tab button.active {
154 background-color: #545a69;
155 }
79} 156}
80</style> 157</style>
81 158
@@ -83,13 +160,14 @@ h3 {
83</head> 160</head>
84 161
85{% macro poky_link(commit) -%} 162{% macro poky_link(commit) -%}
86 <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> 163 <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a>
87{%- endmacro %} 164{%- endmacro %}
88 165
89<body><div style="width: 700px"> 166<body><div>
167 <h1 style="text-align: center;">Performance Test Report</h1>
90 {# Test metadata #} 168 {# Test metadata #}
91 <h2>General</h2> 169 <h2>General</h2>
92 <hr> 170 <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4>
93 <table class="meta-table" style="width: 100%"> 171 <table class="meta-table" style="width: 100%">
94 <tr> 172 <tr>
95 <th></th> 173 <th></th>
@@ -112,19 +190,21 @@ h3 {
112 190
113 {# Test result summary #} 191 {# Test result summary #}
114 <h2>Test result summary</h2> 192 <h2>Test result summary</h2>
115 <hr> 193 <h4>The test summary gives a per-test breakdown for the branch, covering build time and disk space consumption. For each measurement it lists the mean value and the absolute and relative difference between the two compared commits.</h4>
116 <table class="summary" style="width: 100%"> 194 <table class="summary" style="width: 100%">
195 <tr>
196 <th>Test name</th>
197 <th>Measurement description</th>
198 <th>Mean value</th>
199 <th>Absolute difference</th>
200 <th>Relative difference</th>
201 </tr>
117 {% for test in test_data %} 202 {% for test in test_data %}
118 {% if loop.index is even %}
119 {% set row_style = 'style="background-color: #f2f2f2"' %}
120 {% else %}
121 {% set row_style = 'style="background-color: #ffffff"' %}
122 {% endif %}
123 {% if test.status == 'SUCCESS' %} 203 {% if test.status == 'SUCCESS' %}
124 {% for measurement in test.measurements %} 204 {% for measurement in test.measurements %}
125 <tr {{ row_style }}> 205 <tr {{ row_style }}>
126 {% if loop.index == 1 %} 206 {% if loop.index == 1 %}
127 <td>{{ test.name }}: {{ test.description }}</td> 207 <td><a href="#{{ test.name }}">{{ test.name }}: {{ test.description }}</a></td>
128 {% else %} 208 {% else %}
129 {# add empty cell in place of the test name#} 209 {# add empty cell in place of the test name#}
130 <td></td> 210 <td></td>
@@ -153,10 +233,12 @@ h3 {
153 </table> 233 </table>
154 234
155 {# Detailed test results #} 235 {# Detailed test results #}
236 <h2>Test details</h2>
237 <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4>
156 {% for test in test_data %} 238 {% for test in test_data %}
157 <h2>{{ test.name }}: {{ test.description }}</h2> 239 <h3 style="color: var(--text);" id="{{ test.name }}">{{ test.name }}: {{ test.description }}</h3>
158 <hr>
159 {% if test.status == 'SUCCESS' %} 240 {% if test.status == 'SUCCESS' %}
241 <div class="card-container">
160 {% for measurement in test.measurements %} 242 {% for measurement in test.measurements %}
161 <div class="measurement"> 243 <div class="measurement">
162 <h3>{{ measurement.description }}</h3> 244 <h3>{{ measurement.description }}</h3>
@@ -178,7 +260,18 @@ h3 {
178 <tr> 260 <tr>
179 <td style="width: 75%"> 261 <td style="width: 75%">
180 {# Linechart #} 262 {# Linechart #}
181 <div id="{{ test.name }}_{{ measurement.name }}_chart"></div> 263 <div class="tab {{ test.name }}_{{ measurement.name }}_tablinks">
264 <button class="tablinks active" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_start_time', '{{ test.name }}_{{ measurement.name }}')">Chart with start time</button>
265 <button class="tablinks" onclick="openChart(event, '{{ test.name }}_{{ measurement.name }}_commit_count', '{{ test.name }}_{{ measurement.name }}')">Chart with commit count</button>
266 </div>
267 <div class="{{ test.name }}_{{ measurement.name }}_tabcontent">
268 <div id="{{ test.name }}_{{ measurement.name }}_start_time" class="tabcontent" style="display: block;">
269 <div id="{{ test.name }}_{{ measurement.name }}_chart_start_time"></div>
270 </div>
271 <div id="{{ test.name }}_{{ measurement.name }}_commit_count" class="tabcontent" style="display: none;">
272 <div id="{{ test.name }}_{{ measurement.name }}_chart_commit_count"></div>
273 </div>
274 </div>
182 </td> 275 </td>
183 <td> 276 <td>
184 {# Measurement statistics #} 277 {# Measurement statistics #}
@@ -275,7 +368,8 @@ h3 {
275 {% endif %} 368 {% endif %}
276 {% endif %} 369 {% endif %}
277 </div> 370 </div>
278 {% endfor %} 371 {% endfor %}
372 </div>
279 {# Unsuccessful test #} 373 {# Unsuccessful test #}
280 {% else %} 374 {% else %}
281 <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} 375 <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }}
@@ -284,6 +378,31 @@ h3 {
284 <div class="preformatted">{{ test.message }}</div> 378 <div class="preformatted">{{ test.message }}</div>
285 {% endif %} 379 {% endif %}
286 {% endfor %} 380 {% endfor %}
287</div></body> 381</div>
288</html>
289 382
383<script>
384function openChart(event, chartType, chartName) {
385 let tabcontents, tablinks;
386 tabcontents = document.querySelectorAll(`.${chartName}_tabcontent > .tabcontent`);
387 tabcontents.forEach((tabcontent) => {
388 tabcontent.style.display = "none";
389 });
390
391 tablinks = document.querySelectorAll(`.${chartName}_tablinks > .tablinks`);
392 tablinks.forEach((tabLink) => {
393 tabLink.classList.remove('active');
394 });
395
396 const targetTab = document.getElementById(chartType)
397 targetTab.style.display = "block";
398
399 // Call resize on the ECharts instance to redraw the chart
400 const chartContainer = targetTab.querySelector('div')
401 echarts.init(chartContainer).resize();
402
403 event.currentTarget.classList.add('active');
404}
405</script>
406
407</body>
408</html>
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py
index ab77424cc7..f4e6a92e09 100644
--- a/scripts/lib/build_perf/report.py
+++ b/scripts/lib/build_perf/report.py
@@ -294,7 +294,7 @@ class SizeVal(MeasurementVal):
294 return "null" 294 return "null"
295 return self / 1024 295 return self / 1024
296 296
297def measurement_stats(meas, prefix=''): 297def measurement_stats(meas, prefix='', time=0):
298 """Get statistics of a measurement""" 298 """Get statistics of a measurement"""
299 if not meas: 299 if not meas:
300 return {prefix + 'sample_cnt': 0, 300 return {prefix + 'sample_cnt': 0,
@@ -319,6 +319,8 @@ def measurement_stats(meas, prefix=''):
319 stats['quantity'] = val_cls.quantity 319 stats['quantity'] = val_cls.quantity
320 stats[prefix + 'sample_cnt'] = len(values) 320 stats[prefix + 'sample_cnt'] = len(values)
321 321
322 # Record the start time for both the sysres and disk usage measurement types
323 start_time = time
322 mean_val = val_cls(mean(values)) 324 mean_val = val_cls(mean(values))
323 min_val = val_cls(min(values)) 325 min_val = val_cls(min(values))
324 max_val = val_cls(max(values)) 326 max_val = val_cls(max(values))
@@ -334,6 +336,7 @@ def measurement_stats(meas, prefix=''):
334 stats[prefix + 'max'] = max_val 336 stats[prefix + 'max'] = max_val
335 stats[prefix + 'minus'] = val_cls(mean_val - min_val) 337 stats[prefix + 'minus'] = val_cls(mean_val - min_val)
336 stats[prefix + 'plus'] = val_cls(max_val - mean_val) 338 stats[prefix + 'plus'] = val_cls(max_val - mean_val)
339 stats[prefix + 'start_time'] = start_time
337 340
338 return stats 341 return stats
339 342
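The new `time` parameter is simply threaded through to the returned stats dict so the report charts can plot each sample series against a wall-clock start time. A standalone sketch of the pattern (a hypothetical helper, not the module's exact data model):

from statistics import mean

def stats_with_start(values, prefix='', start_time=0):
    # Aggregate the samples and attach the caller-supplied start time so it
    # travels with the other statistics, as measurement_stats() now does.
    stats = {prefix + 'sample_cnt': len(values)}
    if values:
        stats[prefix + 'mean'] = mean(values)
        stats[prefix + 'min'] = min(values)
        stats[prefix + 'max'] = max(values)
    stats[prefix + 'start_time'] = start_time
    return stats

print(stats_with_start([41.2, 39.8, 40.5], start_time=1700000000))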
diff --git a/scripts/lib/buildstats.py b/scripts/lib/buildstats.py
index c69b5bf4d7..6db60d5bcf 100644
--- a/scripts/lib/buildstats.py
+++ b/scripts/lib/buildstats.py
@@ -8,7 +8,7 @@ import json
8import logging 8import logging
9import os 9import os
10import re 10import re
11from collections import namedtuple,OrderedDict 11from collections import namedtuple
12from statistics import mean 12from statistics import mean
13 13
14 14
@@ -79,8 +79,8 @@ class BSTask(dict):
79 return self['rusage']['ru_oublock'] 79 return self['rusage']['ru_oublock']
80 80
81 @classmethod 81 @classmethod
82 def from_file(cls, buildstat_file): 82 def from_file(cls, buildstat_file, fallback_end=0):
83 """Read buildstat text file""" 83 """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing."""
84 bs_task = cls() 84 bs_task = cls()
85 log.debug("Reading task buildstats from %s", buildstat_file) 85 log.debug("Reading task buildstats from %s", buildstat_file)
86 end_time = None 86 end_time = None
@@ -108,7 +108,10 @@ class BSTask(dict):
108 bs_task[ru_type][ru_key] = val 108 bs_task[ru_type][ru_key] = val
109 elif key == 'Status': 109 elif key == 'Status':
110 bs_task['status'] = val 110 bs_task['status'] = val
111 if end_time is not None and start_time is not None: 111 # If the task didn't finish, fill in the fallback end time if specified
112 if start_time and not end_time and fallback_end:
113 end_time = fallback_end
114 if start_time and end_time:
112 bs_task['elapsed_time'] = end_time - start_time 115 bs_task['elapsed_time'] = end_time - start_time
113 else: 116 else:
114 raise BSError("{} looks like an invalid buildstats file".format(buildstat_file)) 117 raise BSError("{} looks like an invalid buildstats file".format(buildstat_file))
@@ -226,25 +229,44 @@ class BuildStats(dict):
226 epoch = match.group('epoch') 229 epoch = match.group('epoch')
227 return name, epoch, version, revision 230 return name, epoch, version, revision
228 231
232 @staticmethod
233 def parse_top_build_stats(path):
234 """
235 Parse the top-level build_stats file for build-wide start and duration.
236 """
237 start = elapsed = 0
238 with open(path) as fobj:
239 for line in fobj.readlines():
240 key, val = line.split(':', 1)
241 val = val.strip()
242 if key == 'Build Started':
243 start = float(val)
244 elif key == "Elapsed time":
245 elapsed = float(val.split()[0])
246 return start, elapsed
247
229 @classmethod 248 @classmethod
230 def from_dir(cls, path): 249 def from_dir(cls, path):
231 """Load buildstats from a buildstats directory""" 250 """Load buildstats from a buildstats directory"""
232 if not os.path.isfile(os.path.join(path, 'build_stats')): 251 top_stats = os.path.join(path, 'build_stats')
252 if not os.path.isfile(top_stats):
233 raise BSError("{} does not look like a buildstats directory".format(path)) 253 raise BSError("{} does not look like a buildstats directory".format(path))
234 254
235 log.debug("Reading buildstats directory %s", path) 255 log.debug("Reading buildstats directory %s", path)
236
237 buildstats = cls() 256 buildstats = cls()
257 build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats)
258 build_end = build_started + build_elapsed
259
238 subdirs = os.listdir(path) 260 subdirs = os.listdir(path)
239 for dirname in subdirs: 261 for dirname in subdirs:
240 recipe_dir = os.path.join(path, dirname) 262 recipe_dir = os.path.join(path, dirname)
241 if not os.path.isdir(recipe_dir): 263 if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir):
242 continue 264 continue
243 name, epoch, version, revision = cls.split_nevr(dirname) 265 name, epoch, version, revision = cls.split_nevr(dirname)
244 bsrecipe = BSRecipe(name, epoch, version, revision) 266 bsrecipe = BSRecipe(name, epoch, version, revision)
245 for task in os.listdir(recipe_dir): 267 for task in os.listdir(recipe_dir):
246 bsrecipe.tasks[task] = BSTask.from_file( 268 bsrecipe.tasks[task] = BSTask.from_file(
247 os.path.join(recipe_dir, task)) 269 os.path.join(recipe_dir, task), build_end)
248 if name in buildstats: 270 if name in buildstats:
249 raise BSError("Cannot handle multiple versions of the same " 271 raise BSError("Cannot handle multiple versions of the same "
250 "package ({})".format(name)) 272 "package ({})".format(name))
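Taken together, these changes mean a task that never logged an end time is attributed the build-wide end time instead of being dropped from the elapsed-time accounting. A standalone sketch of how from_dir() derives that fallback, using the same key/value format as the top-level build_stats file:

def parse_top_build_stats_sketch(lines):
    # Mirrors parse_top_build_stats(): pick out the start and elapsed time.
    start = elapsed = 0.0
    for line in lines:
        key, val = line.split(':', 1)
        val = val.strip()
        if key == 'Build Started':
            start = float(val)
        elif key == 'Elapsed time':
            elapsed = float(val.split()[0])
    return start, elapsed

start, elapsed = parse_top_build_stats_sketch(
    ['Build Started: 1700000000.0', 'Elapsed time: 842.0 seconds'])
build_end = start + elapsed   # passed to BSTask.from_file() as fallback_end
print(build_end)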
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index e69a10f452..86aadf39a6 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -16,6 +16,7 @@ class LayerType(Enum):
16 BSP = 0 16 BSP = 0
17 DISTRO = 1 17 DISTRO = 1
18 SOFTWARE = 2 18 SOFTWARE = 2
19 CORE = 3
19 ERROR_NO_LAYER_CONF = 98 20 ERROR_NO_LAYER_CONF = 98
20 ERROR_BSP_DISTRO = 99 21 ERROR_BSP_DISTRO = 99
21 22
@@ -43,7 +44,7 @@ def _get_layer_collections(layer_path, lconf=None, data=None):
43 44
44 ldata.setVar('LAYERDIR', layer_path) 45 ldata.setVar('LAYERDIR', layer_path)
45 try: 46 try:
46 ldata = bb.parse.handle(lconf, ldata, include=True) 47 ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
47 except: 48 except:
48 raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path) 49 raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
49 ldata.expandVarref('LAYERDIR') 50 ldata.expandVarref('LAYERDIR')
@@ -106,7 +107,13 @@ def _detect_layer(layer_path):
106 if distros: 107 if distros:
107 is_distro = True 108 is_distro = True
108 109
109 if is_bsp and is_distro: 110 layer['collections'] = _get_layer_collections(layer['path'])
111
112 if layer_name == "meta" and "core" in layer['collections']:
113 layer['type'] = LayerType.CORE
114 layer['conf']['machines'] = machines
115 layer['conf']['distros'] = distros
116 elif is_bsp and is_distro:
110 layer['type'] = LayerType.ERROR_BSP_DISTRO 117 layer['type'] = LayerType.ERROR_BSP_DISTRO
111 elif is_bsp: 118 elif is_bsp:
112 layer['type'] = LayerType.BSP 119 layer['type'] = LayerType.BSP
@@ -117,8 +124,6 @@ def _detect_layer(layer_path):
117 else: 124 else:
118 layer['type'] = LayerType.SOFTWARE 125 layer['type'] = LayerType.SOFTWARE
119 126
120 layer['collections'] = _get_layer_collections(layer['path'])
121
122 return layer 127 return layer
123 128
124def detect_layers(layer_directories, no_auto): 129def detect_layers(layer_directories, no_auto):
@@ -156,6 +161,27 @@ def _find_layer(depend, layers):
156 return layer 161 return layer
157 return None 162 return None
158 163
164def sanity_check_layers(layers, logger):
165 """
166 Check that we didn't find duplicate collection names, as the layer that will
167 be used is non-deterministic. The precise check is duplicate collections
168 with different patterns, as the same pattern being repeated won't cause
169 problems.
170 """
171 import collections
172
173 passed = True
174 seen = collections.defaultdict(set)
175 for layer in layers:
176 for name, data in layer.get("collections", {}).items():
177 seen[name].add(data["pattern"])
178
179 for name, patterns in seen.items():
180 if len(patterns) > 1:
181 passed = False
182 logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns)))
183 return passed
184
159def get_layer_dependencies(layer, layers, logger): 185def get_layer_dependencies(layer, layers, logger):
160 def recurse_dependencies(depends, layer, layers, logger, ret = []): 186 def recurse_dependencies(depends, layer, layers, logger, ret = []):
161 logger.debug('Processing dependencies %s for layer %s.' % \ 187 logger.debug('Processing dependencies %s for layer %s.' % \
@@ -261,7 +287,7 @@ def check_command(error_msg, cmd, cwd=None):
261 raise RuntimeError(msg) 287 raise RuntimeError(msg)
262 return output 288 return output
263 289
264def get_signatures(builddir, failsafe=False, machine=None): 290def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
265 import re 291 import re
266 292
267 # some recipes needs to be excluded like meta-world-pkgdata 293 # some recipes needs to be excluded like meta-world-pkgdata
@@ -272,13 +298,16 @@ def get_signatures(builddir, failsafe=False, machine=None):
272 sigs = {} 298 sigs = {}
273 tune2tasks = {} 299 tune2tasks = {}
274 300
275 cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" ' 301 cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
302 if extravars:
303 cmd += extravars
304 cmd += ' '
276 if machine: 305 if machine:
277 cmd += 'MACHINE=%s ' % machine 306 cmd += 'MACHINE=%s ' % machine
278 cmd += 'bitbake ' 307 cmd += 'bitbake '
279 if failsafe: 308 if failsafe:
280 cmd += '-k ' 309 cmd += '-k '
281 cmd += '-S none world' 310 cmd += '-S lockedsigs world'
282 sigs_file = os.path.join(builddir, 'locked-sigs.inc') 311 sigs_file = os.path.join(builddir, 'locked-sigs.inc')
283 if os.path.exists(sigs_file): 312 if os.path.exists(sigs_file):
284 os.unlink(sigs_file) 313 os.unlink(sigs_file)
@@ -295,8 +324,8 @@ def get_signatures(builddir, failsafe=False, machine=None):
295 else: 324 else:
296 raise 325 raise
297 326
298 sig_regex = re.compile("^(?P<task>.*:.*):(?P<hash>.*) .$") 327 sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
299 tune_regex = re.compile("(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*") 328 tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
300 current_tune = None 329 current_tune = None
301 with open(sigs_file, 'r') as f: 330 with open(sigs_file, 'r') as f:
302 for line in f.readlines(): 331 for line in f.readlines():
@@ -423,3 +452,15 @@ def compare_signatures(old_sigs, curr_sigs):
423 msg.extend([' ' + line for line in output.splitlines()]) 452 msg.extend([' ' + line for line in output.splitlines()])
424 msg.append('') 453 msg.append('')
425 return '\n'.join(msg) 454 return '\n'.join(msg)
455
456
457def get_git_toplevel(directory):
458 """
459 Try and find the top of the git repository that directory might be in.
460 Returns the top-level directory, or None.
461 """
462 cmd = ["git", "-C", directory, "rev-parse", "--show-toplevel"]
463 try:
464 return subprocess.check_output(cmd, text=True).strip()
465 except:
466 return None
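sanity_check_layers() only complains when the same collection name is declared with differing BBFILE_PATTERN values; identical re-declarations are tolerated. A hedged usage sketch with hand-built layer dicts (the real ones come from detect_layers(); run from a poky checkout with scripts/lib on sys.path):

import logging
from checklayer import sanity_check_layers

layers = [
    {"name": "meta-a", "collections": {"core": {"pattern": "^/layers/meta-a/"}}},
    {"name": "meta-b", "collections": {"core": {"pattern": "^/layers/meta-b/"}}},
]
logger = logging.getLogger("yocto-check-layer")
# Collection "core" appears twice with different patterns, so this logs an
# error and returns False; with identical patterns it would return True.
print(sanity_check_layers(layers, logger))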
diff --git a/scripts/lib/checklayer/cases/bsp.py b/scripts/lib/checklayer/cases/bsp.py
index 7fd56f5d36..b76163fb56 100644
--- a/scripts/lib/checklayer/cases/bsp.py
+++ b/scripts/lib/checklayer/cases/bsp.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase
11class BSPCheckLayer(OECheckLayerTestCase): 11class BSPCheckLayer(OECheckLayerTestCase):
12 @classmethod 12 @classmethod
13 def setUpClass(self): 13 def setUpClass(self):
14 if self.tc.layer['type'] != LayerType.BSP: 14 if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE):
15 raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\ 15 raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\
16 self.tc.layer['name']) 16 self.tc.layer['name'])
17 17
@@ -153,7 +153,7 @@ class BSPCheckLayer(OECheckLayerTestCase):
153 # do_build can be ignored: it is known to have 153 # do_build can be ignored: it is known to have
154 # different signatures in some cases, for example in 154 # different signatures in some cases, for example in
155 # the allarch ca-certificates due to RDEPENDS=openssl. 155 # the allarch ca-certificates due to RDEPENDS=openssl.
156 # That particular dependency is whitelisted via 156 # That particular dependency is marked via
157 # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up 157 # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
158 # in the sstate signature hash because filtering it 158 # in the sstate signature hash because filtering it
159 # out would be hard and running do_build multiple 159 # out would be hard and running do_build multiple
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index fdfb5d18cd..ddead69a7b 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -7,11 +7,14 @@ import glob
7import os 7import os
8import unittest 8import unittest
9import re 9import re
10from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures 10from checklayer import get_signatures, LayerType, check_command, compare_signatures, get_git_toplevel
11from checklayer.case import OECheckLayerTestCase 11from checklayer.case import OECheckLayerTestCase
12 12
13class CommonCheckLayer(OECheckLayerTestCase): 13class CommonCheckLayer(OECheckLayerTestCase):
14 def test_readme(self): 14 def test_readme(self):
15 if self.tc.layer['type'] == LayerType.CORE:
16 raise unittest.SkipTest("Core layer's README is top level")
17
15 # The top-level README file may have a suffix (like README.rst or README.txt). 18 # The top-level README file may have a suffix (like README.rst or README.txt).
16 readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*')) 19 readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
17 self.assertTrue(len(readme_files) > 0, 20 self.assertTrue(len(readme_files) > 0,
@@ -31,12 +34,44 @@ class CommonCheckLayer(OECheckLayerTestCase):
31 if re.search('README', data, re.IGNORECASE): 34 if re.search('README', data, re.IGNORECASE):
32 return 35 return
33 36
34 self.assertIn('maintainer', data) 37 self.assertIn('maintainer', data.lower())
35 self.assertIn('patch',data) 38 self.assertIn('patch', data.lower())
36 # Check that there is an email address in the README 39 # Check that there is an email address in the README
37 email_regex = re.compile(r"[^@]+@[^@]+") 40 email_regex = re.compile(r"[^@]+@[^@]+")
38 self.assertTrue(email_regex.match(data)) 41 self.assertTrue(email_regex.match(data))
39 42
43 def find_file_by_name(self, globs):
44 """
45 Utility function to find a file that matches the specified list of
46 globs, in either the layer directory itself or the repository top-level
47 directory.
48 """
49 directories = [self.tc.layer["path"]]
50 toplevel = get_git_toplevel(directories[0])
51 if toplevel:
52 directories.append(toplevel)
53
54 for path in directories:
55 for name in globs:
56 files = glob.glob(os.path.join(path, name))
57 if files:
58 return sorted(files)[0]
59 return None
60
61 def test_security(self):
62 """
63 Test that the layer has a SECURITY.md (or similar) file, either in the
64 layer itself or at the top of the containing git repository.
65 """
66 if self.tc.layer["type"] == LayerType.CORE:
67 raise unittest.SkipTest("Core layer's SECURITY is top level")
68
69 filename = self.find_file_by_name(("SECURITY", "SECURITY.*"))
70 self.assertTrue(filename, msg="Layer doesn't contain a SECURITY.md file.")
71
72 size = os.path.getsize(filename)
73 self.assertGreater(size, 0, msg=f"{filename} has no content.")
74
40 def test_parse(self): 75 def test_parse(self):
41 check_command('Layer %s failed to parse.' % self.tc.layer['name'], 76 check_command('Layer %s failed to parse.' % self.tc.layer['name'],
42 'bitbake -p') 77 'bitbake -p')
@@ -54,6 +89,35 @@ class CommonCheckLayer(OECheckLayerTestCase):
54 ''' 89 '''
55 get_signatures(self.td['builddir'], failsafe=False) 90 get_signatures(self.td['builddir'], failsafe=False)
56 91
92 def test_world_inherit_class(self):
93 '''
94 This also does "bitbake -S lockedsigs world" along with inheriting the "yocto-check-layer"
95 class, which can run additional per-recipe test cases.
96 '''
97 msg = []
98 try:
99 get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"')
100 except RuntimeError as ex:
101 msg.append(str(ex))
102 if msg:
103 msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \
104 self.tc.layer['name'])
105 self.fail('\n'.join(msg))
106
107 def test_patches_upstream_status(self):
108 import sys
109 sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
110 import oe.qa
111 patches = []
112 for dirpath, dirs, files in os.walk(self.tc.layer['path']):
113 for filename in files:
114 if filename.endswith(".patch"):
115 ppath = os.path.join(dirpath, filename)
116 if oe.qa.check_upstream_status(ppath):
117 patches.append(ppath)
118 self.assertEqual(len(patches), 0, \
119 msg="Found the following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches]))
120
57 def test_signatures(self): 121 def test_signatures(self):
58 if self.tc.layer['type'] == LayerType.SOFTWARE and \ 122 if self.tc.layer['type'] == LayerType.SOFTWARE and \
59 not self.tc.test_software_layer_signatures: 123 not self.tc.test_software_layer_signatures:
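The SECURITY lookup intentionally falls back from the layer directory to the enclosing git top level, since many layers keep a single SECURITY.md at the repository root. A standalone sketch of the same search order (find_first is a hypothetical helper):

import glob
import os
import subprocess

def find_first(directory, globs):
    # Layer directory first, then the git repository root, as find_file_by_name does.
    dirs = [directory]
    try:
        top = subprocess.check_output(
            ["git", "-C", directory, "rev-parse", "--show-toplevel"],
            text=True).strip()
        dirs.append(top)
    except (subprocess.CalledProcessError, FileNotFoundError):
        pass
    for path in dirs:
        for pattern in globs:
            hits = glob.glob(os.path.join(path, pattern))
            if hits:
                return sorted(hits)[0]
    return None

print(find_first(".", ("SECURITY", "SECURITY.*")))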
diff --git a/scripts/lib/checklayer/cases/distro.py b/scripts/lib/checklayer/cases/distro.py
index f0bee5493c..a35332451c 100644
--- a/scripts/lib/checklayer/cases/distro.py
+++ b/scripts/lib/checklayer/cases/distro.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase
11class DistroCheckLayer(OECheckLayerTestCase): 11class DistroCheckLayer(OECheckLayerTestCase):
12 @classmethod 12 @classmethod
13 def setUpClass(self): 13 def setUpClass(self):
14 if self.tc.layer['type'] != LayerType.DISTRO: 14 if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE):
15 raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\ 15 raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\
16 self.tc.layer['name']) 16 self.tc.layer['name'])
17 17
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 702db669de..fa6e1a34fd 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs):
78 """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" 78 """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
79 # Grab the command and check it actually exists 79 # Grab the command and check it actually exists
80 fakerootcmd = d.getVar('FAKEROOTCMD') 80 fakerootcmd = d.getVar('FAKEROOTCMD')
81 fakerootenv = d.getVar('FAKEROOTENV')
82 return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
83
84def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
81 if not os.path.exists(fakerootcmd): 85 if not os.path.exists(fakerootcmd):
82 logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built' % fakerootcmd) 86 logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built' % fakerootcmd)
83 return 2 87 return 2
84 # Set up the appropriate environment 88 # Set up the appropriate environment
85 newenv = dict(os.environ) 89 newenv = dict(os.environ)
86 fakerootenv = d.getVar('FAKEROOTENV')
87 for varvalue in fakerootenv.split(): 90 for varvalue in fakerootenv.split():
88 if '=' in varvalue: 91 if '=' in varvalue:
89 splitval = varvalue.split('=', 1) 92 splitval = varvalue.split('=', 1)
@@ -231,7 +234,29 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
231 f.write(line) 234 f.write(line)
232 235
233 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) 236 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
234 bb.process.run('git tag -f %s' % basetag, cwd=repodir) 237 bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir)
238
239 # if the recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
240 # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
241 stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
242 found = False
243 for line in stdout.splitlines():
244 if line.endswith("/"):
245 new_dir = line.split()[1]
246 for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
247 if ".git" in dirs + files:
248 (stdout, _) = bb.process.run('git remote', cwd=root)
249 remote = stdout.splitlines()[0]
250 (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
251 remote_url = stdout.splitlines()[0]
252 logger.error(os.path.relpath(os.path.join(root, ".."), root))
253 bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
254 found = True
255 if found:
256 oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
257 found = False
258 if os.path.exists(os.path.join(repodir, '.gitmodules')):
259 bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir)
235 260
236def recipe_to_append(recipefile, config, wildcard=False): 261def recipe_to_append(recipefile, config, wildcard=False):
237 """ 262 """
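The exec_fakeroot()/exec_fakeroot_no_d() split lets callers that have no datastore in scope (such as the refactored deploy path below) run pseudo with values extracted earlier. A standalone sketch of how a FAKEROOTENV-style string is merged into the subprocess environment, mirroring the loop in exec_fakeroot_no_d (the example variable values are illustrative only):

import os

def merged_env(fakerootenv):
    # "VAR=value VAR2=value2" -> copy of os.environ with those variables set
    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            var, value = varvalue.split('=', 1)
            newenv[var] = value
    return newenv

env = merged_env("PSEUDO_DISABLED=0 PSEUDO_NOSYMLINKEXP=1")
print(env["PSEUDO_DISABLED"], env["PSEUDO_NOSYMLINKEXP"])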
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
index 935ffab46c..0b2c3d33dc 100644
--- a/scripts/lib/devtool/build.py
+++ b/scripts/lib/devtool/build.py
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace):
49 rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) 49 rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
50 if not rd: 50 if not rd:
51 return 1 51 return 1
52 deploytask = 'do_deploy' in rd.getVar('__BBTASKS') 52 deploytask = 'do_deploy' in bb.build.listtasks(rd)
53 finally: 53 finally:
54 tinfoil.shutdown() 54 tinfoil.shutdown()
55 55
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 6fe02fff2a..990303982c 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -5,15 +5,8 @@
5# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
6# 6#
7 7
8import os
9import subprocess
10import logging 8import logging
11import glob 9from devtool import DevtoolError
12import shutil
13import errno
14import sys
15import tempfile
16from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
17from devtool import build_image 10from devtool import build_image
18 11
19logger = logging.getLogger('devtool') 12logger = logging.getLogger('devtool')
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index 833322571f..b5ca8f2c2f 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -16,7 +16,7 @@ import bb.utils
16import argparse_oe 16import argparse_oe
17import oe.types 17import oe.types
18 18
19from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError 19from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
20 20
21logger = logging.getLogger('devtool') 21logger = logging.getLogger('devtool')
22 22
@@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
133 133
134 return '\n'.join(lines) 134 return '\n'.join(lines)
135 135
136
137
138def deploy(args, config, basepath, workspace): 136def deploy(args, config, basepath, workspace):
139 """Entry point for the devtool 'deploy' subcommand""" 137 """Entry point for the devtool 'deploy' subcommand"""
140 import math 138 import oe.utils
141 import oe.recipeutils
142 import oe.package
143 139
144 check_workspace_recipe(workspace, args.recipename, checksrc=False) 140 check_workspace_recipe(workspace, args.recipename, checksrc=False)
145 141
142 tinfoil = setup_tinfoil(basepath=basepath)
143 try:
144 try:
145 rd = tinfoil.parse_recipe(args.recipename)
146 except Exception as e:
147 raise DevtoolError('Exception parsing recipe %s: %s' %
148 (args.recipename, e))
149
150 srcdir = rd.getVar('D')
151 workdir = rd.getVar('WORKDIR')
152 path = rd.getVar('PATH')
153 strip_cmd = rd.getVar('STRIP')
154 libdir = rd.getVar('libdir')
155 base_libdir = rd.getVar('base_libdir')
156 max_process = oe.utils.get_bb_number_threads(rd)
157 fakerootcmd = rd.getVar('FAKEROOTCMD')
158 fakerootenv = rd.getVar('FAKEROOTENV')
159 finally:
160 tinfoil.shutdown()
161
162 return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
163
164def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
165 import math
166 import oe.package
167
146 try: 168 try:
147 host, destdir = args.target.split(':') 169 host, destdir = args.target.split(':')
148 except ValueError: 170 except ValueError:
@@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace):
152 if not destdir.endswith('/'): 174 if not destdir.endswith('/'):
153 destdir += '/' 175 destdir += '/'
154 176
155 tinfoil = setup_tinfoil(basepath=basepath) 177 recipe_outdir = srcdir
156 try: 178 if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
157 try: 179 raise DevtoolError('No files to deploy - have you built the %s '
158 rd = tinfoil.parse_recipe(args.recipename) 180 'recipe? If so, the install step has not installed '
159 except Exception as e: 181 'any files.' % args.recipename)
160 raise DevtoolError('Exception parsing recipe %s: %s' % 182
161 (args.recipename, e)) 183 if args.strip and not args.dry_run:
162 recipe_outdir = rd.getVar('D') 184 # Fakeroot copy to new destination
163 if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): 185 srcdir = recipe_outdir
164 raise DevtoolError('No files to deploy - have you built the %s ' 186 recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
165 'recipe? If so, the install step has not installed ' 187 if os.path.isdir(recipe_outdir):
166 'any files.' % args.recipename) 188 exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
167 189 exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
168 if args.strip and not args.dry_run: 190 os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
169 # Fakeroot copy to new destination 191 oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)
170 srcdir = recipe_outdir 192
171 recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped') 193 filelist = []
172 if os.path.isdir(recipe_outdir): 194 inodes = set({})
173 bb.utils.remove(recipe_outdir, True) 195 ftotalsize = 0
174 exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) 196 for root, _, files in os.walk(recipe_outdir):
175 os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) 197 for fn in files:
176 oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), 198 fstat = os.lstat(os.path.join(root, fn))
177 rd.getVar('base_libdir'), rd) 199 # Get the size in kiB (since we'll be comparing it to the output of du -k)
178 200 # MUST use lstat() here not stat() or getfilesize() since we don't want to
179 filelist = [] 201 # dereference symlinks
180 inodes = set({}) 202 if fstat.st_ino in inodes:
181 ftotalsize = 0 203 fsize = 0
182 for root, _, files in os.walk(recipe_outdir): 204 else:
183 for fn in files: 205 fsize = int(math.ceil(float(fstat.st_size)/1024))
184 fstat = os.lstat(os.path.join(root, fn)) 206 inodes.add(fstat.st_ino)
185 # Get the size in kiB (since we'll be comparing it to the output of du -k) 207 ftotalsize += fsize
186 # MUST use lstat() here not stat() or getfilesize() since we don't want to 208 # The path as it would appear on the target
187 # dereference symlinks 209 fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
188 if fstat.st_ino in inodes: 210 filelist.append((fpath, fsize))
189 fsize = 0 211
190 else: 212 if args.dry_run:
191 fsize = int(math.ceil(float(fstat.st_size)/1024)) 213 print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
192 inodes.add(fstat.st_ino) 214 for item, _ in filelist:
193 ftotalsize += fsize 215 print(' %s' % item)
194 # The path as it would appear on the target 216 return 0
195 fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
196 filelist.append((fpath, fsize))
197
198 if args.dry_run:
199 print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
200 for item, _ in filelist:
201 print(' %s' % item)
202 return 0
203
204 extraoptions = ''
205 if args.no_host_check:
206 extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
207 if not args.show_status:
208 extraoptions += ' -q'
209
210 scp_sshexec = ''
211 ssh_sshexec = 'ssh'
212 if args.ssh_exec:
213 scp_sshexec = "-S %s" % args.ssh_exec
214 ssh_sshexec = args.ssh_exec
215 scp_port = ''
216 ssh_port = ''
217 if args.port:
218 scp_port = "-P %s" % args.port
219 ssh_port = "-p %s" % args.port
220
221 if args.key:
222 extraoptions += ' -i %s' % args.key
223
224 # In order to delete previously deployed files and have the manifest file on
225 # the target, we write out a shell script and then copy it to the target
226 # so we can then run it (piping tar output to it).
227 # (We cannot use scp here, because it doesn't preserve symlinks.)
228 tmpdir = tempfile.mkdtemp(prefix='devtool')
229 try:
230 tmpscript = '/tmp/devtool_deploy.sh'
231 tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
232 shellscript = _prepare_remote_script(deploy=True,
233 verbose=args.show_status,
234 nopreserve=args.no_preserve,
235 nocheckspace=args.no_check_space)
236 # Write out the script to a file
237 with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
238 f.write(shellscript)
239 # Write out the file list
240 with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
241 f.write('%d\n' % ftotalsize)
242 for fpath, fsize in filelist:
243 f.write('%s %d\n' % (fpath, fsize))
244 # Copy them to the target
245 ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
246 if ret != 0:
247 raise DevtoolError('Failed to copy script to %s - rerun with -s to '
248 'get a complete error message' % args.target)
249 finally:
250 shutil.rmtree(tmpdir)
251 217
252 # Now run the script 218 extraoptions = ''
253 ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) 219 if args.no_host_check:
254 if ret != 0: 220 extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
255 raise DevtoolError('Deploy failed - rerun with -s to get a complete ' 221 if not args.show_status:
256 'error message') 222 extraoptions += ' -q'
257 223
258 logger.info('Successfully deployed %s' % recipe_outdir) 224 scp_sshexec = ''
225 ssh_sshexec = 'ssh'
226 if args.ssh_exec:
227 scp_sshexec = "-S %s" % args.ssh_exec
228 ssh_sshexec = args.ssh_exec
229 scp_port = ''
230 ssh_port = ''
231 if args.port:
232 scp_port = "-P %s" % args.port
233 ssh_port = "-p %s" % args.port
234
235 if args.key:
236 extraoptions += ' -i %s' % args.key
259 237
260 files_list = [] 238 # In order to delete previously deployed files and have the manifest file on
261 for root, _, files in os.walk(recipe_outdir): 239 # the target, we write out a shell script and then copy it to the target
262 for filename in files: 240 # so we can then run it (piping tar output to it).
263 filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) 241 # (We cannot use scp here, because it doesn't preserve symlinks.)
264 files_list.append(os.path.join(destdir, filename)) 242 tmpdir = tempfile.mkdtemp(prefix='devtool')
243 try:
244 tmpscript = '/tmp/devtool_deploy.sh'
245 tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
246 shellscript = _prepare_remote_script(deploy=True,
247 verbose=args.show_status,
248 nopreserve=args.no_preserve,
249 nocheckspace=args.no_check_space)
250 # Write out the script to a file
251 with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
252 f.write(shellscript)
253 # Write out the file list
254 with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
255 f.write('%d\n' % ftotalsize)
256 for fpath, fsize in filelist:
257 f.write('%s %d\n' % (fpath, fsize))
258 # Copy them to the target
259 ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
260 if ret != 0:
261 raise DevtoolError('Failed to copy script to %s - rerun with -s to '
262 'get a complete error message' % args.target)
265 finally: 263 finally:
266 tinfoil.shutdown() 264 shutil.rmtree(tmpdir)
265
266 # Now run the script
267 ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
268 if ret != 0:
269 raise DevtoolError('Deploy failed - rerun with -s to get a complete '
270 'error message')
271
272 logger.info('Successfully deployed %s' % recipe_outdir)
273
274 files_list = []
275 for root, _, files in os.walk(recipe_outdir):
276 for filename in files:
277 filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
278 files_list.append(os.path.join(destdir, filename))
267 279
268 return 0 280 return 0
269 281
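The size bookkeeping must agree with `du -k` on the target, so symlinks are never followed and hardlinked files are counted only once. A standalone sketch of that walk, stripped of the deploy plumbing:

import math
import os

def deploy_sizes(outdir):
    filelist, inodes, total = [], set(), 0
    for root, _, files in os.walk(outdir):
        for fn in files:
            fstat = os.lstat(os.path.join(root, fn))  # lstat: don't dereference symlinks
            if fstat.st_ino in inodes:                # hardlink seen before counts as 0
                fsize = 0
            else:
                fsize = int(math.ceil(float(fstat.st_size) / 1024))  # kiB, like du -k
                inodes.add(fstat.st_ino)
            total += fsize
            filelist.append((os.path.join(root, fn), fsize))
    return filelist, total

files, total_kib = deploy_sizes(".")
print(total_kib)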
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
new file mode 100644
index 0000000000..19c2f61c5f
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/__init__.py
@@ -0,0 +1,282 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
7
8import errno
9import json
10import logging
11import os
12import stat
13from enum import Enum, auto
14from devtool import DevtoolError
15from bb.utils import mkdirhier
16
17logger = logging.getLogger('devtool')
18
19
20class BuildTool(Enum):
21 UNDEFINED = auto()
22 CMAKE = auto()
23 MESON = auto()
24
25 @property
26 def is_c_ccp(self):
27 if self is BuildTool.CMAKE:
28 return True
29 if self is BuildTool.MESON:
30 return True
31 return False
32
33
34class GdbCrossConfig:
35 """Base class defining the GDB configuration generator interface
36
37 Generate a GDB configuration for a binary on the target device.
38 Only one instance per binary is allowed. This makes it possible to assign unique port
39 numbers for all gdbserver instances.
40 """
41 _gdbserver_port_next = 1234
42 _binaries = []
43
44 def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
45 self.image_recipe = image_recipe
46 self.modified_recipe = modified_recipe
47 self.gdb_cross = modified_recipe.gdb_cross
48 self.binary = binary
49 if binary in GdbCrossConfig._binaries:
50 raise DevtoolError(
51 "gdbserver config for binary %s is already generated" % binary)
52 GdbCrossConfig._binaries.append(binary)
53 self.script_dir = modified_recipe.ide_sdk_scripts_dir
54 self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
55 self.gdbserver_multi = gdbserver_multi
56 self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
57 self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
58 GdbCrossConfig._gdbserver_port_next += 1
59 self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
60 # gdbserver start script
61 gdbserver_script_file = 'gdbserver_' + self.id_pretty
62 if self.gdbserver_multi:
63 gdbserver_script_file += "_m"
64 self.gdbserver_script = os.path.join(
65 self.script_dir, gdbserver_script_file)
66 # gdbinit file
67 self.gdbinit = os.path.join(
68 self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
69 # gdb start script
70 self.gdb_script = os.path.join(
71 self.script_dir, 'gdb_' + self.id_pretty)
72
73 def _gen_gdbserver_start_script(self):
74 """Generate a shell command starting the gdbserver on the remote device via ssh
75
76 GDB supports two modes:
77 multi: gdbserver remains running over several debug sessions
78 once: gdbserver terminates after the debugged process terminates
79 """
80 cmd_lines = ['#!/bin/sh']
81 if self.gdbserver_multi:
82 temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
83 gdbserver_cmd_start = temp_dir
84 gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
85 gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
86 gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
87 self.gdb_cross.gdbserver_path, self.gdbserver_port)
88 gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
89
90 gdbserver_cmd_stop = temp_dir
91 gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
92 gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
93
94 gdbserver_cmd_l = []
95 gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
96 gdbserver_cmd_l.append(' shift')
97 gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
98 self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
99 gdbserver_cmd_l.append('else')
100 gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
101 self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
102 gdbserver_cmd_l.append('fi')
103 gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
104 else:
105 gdbserver_cmd_start = "%s --once :%s %s" % (
106 self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
107 gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
108 self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
109 cmd_lines.append(gdbserver_cmd)
110 GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)
111
112 def _gen_gdbinit_config(self):
113 """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
114 gdbinit_lines = ['# This file is generated by devtool ide-sdk']
115 if self.gdbserver_multi:
116 target_help = '# gdbserver --multi :%d' % self.gdbserver_port
117 remote_cmd = 'target extended-remote'
118 else:
119 target_help = '# gdbserver :%d %s' % (
120 self.gdbserver_port, self.binary)
121 remote_cmd = 'target remote'
122 gdbinit_lines.append('# On the remote target:')
123 gdbinit_lines.append(target_help)
124 gdbinit_lines.append('# On the build machine:')
125 gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
126 gdbinit_lines.append(
127 '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
128
129 gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
130 gdbinit_lines.append('set substitute-path "/usr/include" "' +
131 os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
132 # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
133 gdbinit_lines.append('set debuginfod enabled off')
134 if self.image_recipe.rootfs_dbg:
135 gdbinit_lines.append(
136 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
137 # First: Search for sources of this recipe in the workspace folder
138 if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
139 gdbinit_lines.append('set substitute-path "%s" "%s"' %
140 (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
141 else:
142 logger.error(
143 "TARGET_DBGSRC_DIR must contain the recipe name PN.")
144 # Second: Search for sources of other recipes in the rootfs-dbg
145 if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
146 gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
147 self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
148 else:
149 logger.error(
150 "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
151 else:
152 logger.warning(
153 "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
154 gdbinit_lines.append(
155 '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
156 gdbinit_lines.append('set remote exec-file ' + self.binary)
157 gdbinit_lines.append(
158 'run ' + os.path.join(self.modified_recipe.d, self.binary))
159
160 GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)
161
162 def _gen_gdb_start_script(self):
163 """Generate a script starting GDB with the corresponding gdbinit configuration."""
164 cmd_lines = ['#!/bin/sh']
165 cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
166 cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
167 self.gdbinit + ' "$@"')
168 GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)
169
170 def initialize(self):
171 self._gen_gdbserver_start_script()
172 self._gen_gdbinit_config()
173 self._gen_gdb_start_script()
174
175 @staticmethod
176 def write_file(script_file, cmd_lines, executable=False):
177 script_dir = os.path.dirname(script_file)
178 mkdirhier(script_dir)
179 with open(script_file, 'w') as script_f:
180 script_f.write(os.linesep.join(cmd_lines))
181 script_f.write(os.linesep)
182 if executable:
183 st = os.stat(script_file)
184 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
185 logger.info("Created: %s" % script_file)
186
187
188class IdeBase:
189 """Base class defining the interface for IDE plugins"""
190
191 def __init__(self):
192 self.ide_name = 'undefined'
193 self.gdb_cross_configs = []
194
195 @classmethod
196 def ide_plugin_priority(cls):
197 """Used to find the default ide handler if --ide is not passed"""
198 return 10
199
200 def setup_shared_sysroots(self, shared_env):
201 logger.warning("Shared sysroot mode is not supported for IDE %s" %
202 self.ide_name)
203
204 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
205 logger.warning("Modified recipe mode is not supported for IDE %s" %
206 self.ide_name)
207
208 def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
209 binaries = modified_recipe.find_installed_binaries()
210 for binary in binaries:
211 gdb_cross_config = gdb_cross_config_class(
212 image_recipe, modified_recipe, binary)
213 gdb_cross_config.initialize()
214 self.gdb_cross_configs.append(gdb_cross_config)
215
216 @staticmethod
217 def gen_oe_scrtips_sym_link(modified_recipe):
218 # create a sym-link from sources to the scripts directory
219 if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
220 IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
221 os.path.join(modified_recipe.real_srctree, 'oe-scripts'))
222
223 @staticmethod
224 def update_json_file(json_dir, json_file, update_dict):
225 """Update a json file
226
227 By default it uses the dict.update function. If this is not suitable,
228 the update function may be passed via an update_func parameter.
229 """
230 json_path = os.path.join(json_dir, json_file)
231 logger.info("Updating IDE config file: %s (%s)" %
232 (json_file, json_path))
233 if not os.path.exists(json_dir):
234 os.makedirs(json_dir)
235 try:
236 with open(json_path) as f:
237 orig_dict = json.load(f)
238 except json.decoder.JSONDecodeError:
239 logger.info(
240 "Decoding %s failed. Probably because of comments in the json file" % json_path)
241 orig_dict = {}
242 except FileNotFoundError:
243 orig_dict = {}
244 orig_dict.update(update_dict)
245 with open(json_path, 'w') as f:
246 json.dump(orig_dict, f, indent=4)
247
248 @staticmethod
249 def symlink_force(tgt, dst):
250 try:
251 os.symlink(tgt, dst)
252 except OSError as err:
253 if err.errno == errno.EEXIST:
254 if os.readlink(dst) != tgt:
255 os.remove(dst)
256 os.symlink(tgt, dst)
257 else:
258 raise err
259
260
261def get_devtool_deploy_opts(args):
262 """Filter args for devtool deploy-target args"""
263 if not args.target:
264 return None
265 devtool_deploy_opts = [args.target]
266 if args.no_host_check:
267 devtool_deploy_opts += ["-c"]
268 if args.show_status:
269 devtool_deploy_opts += ["-s"]
270 if args.no_preserve:
271 devtool_deploy_opts += ["-p"]
272 if args.no_check_space:
273 devtool_deploy_opts += ["--no-check-space"]
274 if args.ssh_exec:
275 devtool_deploy_opts += ["-e", args.ssh_exec]
276 if args.port:
277 devtool_deploy_opts += ["-P", args.port]
278 if args.key:
279 devtool_deploy_opts += ["-I", args.key]
280 if args.strip is False:
281 devtool_deploy_opts += ["--no-strip"]
282 return devtool_deploy_opts
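GdbCrossConfig guarantees every binary a distinct gdbserver port by combining a class-level counter with a duplicate check. A minimal standalone sketch of that allocation pattern (PortAllocator is a hypothetical stand-in):

class PortAllocator:
    _next_port = 1234   # first gdbserver port, as in GdbCrossConfig
    _binaries = []

    def __init__(self, binary):
        # Refuse a second config for the same binary, then hand out the next port.
        if binary in PortAllocator._binaries:
            raise ValueError("config for %s already generated" % binary)
        PortAllocator._binaries.append(binary)
        self.port = PortAllocator._next_port
        PortAllocator._next_port += 1

a = PortAllocator("/usr/bin/foo")
b = PortAllocator("/usr/bin/bar")
print(a.port, b.port)   # 1234 1235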
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
new file mode 100644
index 0000000000..ee5bb57265
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -0,0 +1,462 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
7
8import json
9import logging
10import os
11import shutil
12from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
13
14logger = logging.getLogger('devtool')
15
16
17class GdbCrossConfigVSCode(GdbCrossConfig):
18 def __init__(self, image_recipe, modified_recipe, binary):
19 super().__init__(image_recipe, modified_recipe, binary, False)
20
21 def initialize(self):
22 self._gen_gdbserver_start_script()
23
24
25class IdeVSCode(IdeBase):
26 """Manage IDE configurations for VSCode
27
28 Modified recipe mode:
29 - cmake: use the cmake-preset generated by devtool ide-sdk
30 - meson: meson is called via a wrapper script generated by devtool ide-sdk
31
32 Shared sysroot mode:
33 In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
34 A workspace cannot be created because there is no recipe that defines how a workspace could
35 be set up.
36 - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
37 The cmake-kit uses the environment script and the tool-chain file
38 generated by meta-ide-support.
39 - meson: Meson needs manual workspace configuration.
40 """
41
42 @classmethod
43 def ide_plugin_priority(cls):
44 """If --ide is not passed this is the default plugin"""
45 if shutil.which('code'):
46 return 100
47 return 0
48
49 def setup_shared_sysroots(self, shared_env):
50 """Expose the toolchain of the shared sysroots SDK"""
51 datadir = shared_env.ide_support.datadir
52 deploy_dir_image = shared_env.ide_support.deploy_dir_image
53 real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
54 standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
55 vscode_ws_path = os.path.join(
56 os.environ['HOME'], '.local', 'share', 'CMakeTools')
57 cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
58 oecmake_generator = "Ninja"
59 env_script = os.path.join(
60 deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
61
62 if not os.path.isdir(vscode_ws_path):
63 os.makedirs(vscode_ws_path)
64 cmake_kits_old = []
65 if os.path.exists(cmake_kits_path):
66 with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
67 cmake_kits_old = json.load(cmake_kits_file)
68 cmake_kits = cmake_kits_old.copy()
69
70 cmake_kit_new = {
71 "name": "OE " + real_multimach_target_sys,
72 "environmentSetupScript": env_script,
73 "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
74 "preferredGenerator": {
75 "name": oecmake_generator
76 }
77 }
78
79 def merge_kit(cmake_kits, cmake_kit_new):
80 i = 0
81 while i < len(cmake_kits):
82 if 'environmentSetupScript' in cmake_kits[i] and \
83 cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
84 cmake_kits[i] = cmake_kit_new
85 return
86 i += 1
87 cmake_kits.append(cmake_kit_new)
88 merge_kit(cmake_kits, cmake_kit_new)
89
90 if cmake_kits != cmake_kits_old:
91 logger.info("Updating: %s" % cmake_kits_path)
92 with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
93 json.dump(cmake_kits, cmake_kits_file, indent=4)
94 else:
95 logger.info("Already up to date: %s" % cmake_kits_path)
96
97 cmake_native = os.path.join(
98 shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
99 if os.path.isfile(cmake_native):
100 logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
101 else:
102 logger.error("Cannot find cmake native at: %s" % cmake_native)
103
104 def dot_code_dir(self, modified_recipe):
105 return os.path.join(modified_recipe.srctree, '.vscode')
106
107 def __vscode_settings_meson(self, settings_dict, modified_recipe):
108 if modified_recipe.build_tool is not BuildTool.MESON:
109 return
110 settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper
111
112 confopts = modified_recipe.mesonopts.split()
113 confopts += modified_recipe.meson_cross_file.split()
114 confopts += modified_recipe.extra_oemeson.split()
115 settings_dict["mesonbuild.configureOptions"] = confopts
116 settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
117
118 def __vscode_settings_cmake(self, settings_dict, modified_recipe):
119 """Add cmake specific settings to settings.json.
120
121 Note: most settings are passed to the cmake preset.
122 """
123 if modified_recipe.build_tool is not BuildTool.CMAKE:
124 return
125 settings_dict["cmake.configureOnOpen"] = True
126 settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree
127
128 def vscode_settings(self, modified_recipe, image_recipe):
129 files_excludes = {
130 "**/.git/**": True,
131 "**/oe-logs/**": True,
132 "**/oe-workdir/**": True,
133 "**/source-date-epoch/**": True
134 }
135 python_exclude = [
136 "**/.git/**",
137 "**/oe-logs/**",
138 "**/oe-workdir/**",
139 "**/source-date-epoch/**"
140 ]
141 files_readonly = {
142 modified_recipe.recipe_sysroot + '/**': True,
143 modified_recipe.recipe_sysroot_native + '/**': True,
144 }
145 if image_recipe.rootfs_dbg is not None:
146 files_readonly[image_recipe.rootfs_dbg + '/**'] = True
147 settings_dict = {
148 "files.watcherExclude": files_excludes,
149 "files.exclude": files_excludes,
150 "files.readonlyInclude": files_readonly,
151 "python.analysis.exclude": python_exclude
152 }
153 self.__vscode_settings_cmake(settings_dict, modified_recipe)
154 self.__vscode_settings_meson(settings_dict, modified_recipe)
155
156 settings_file = 'settings.json'
157 IdeBase.update_json_file(
158 self.dot_code_dir(modified_recipe), settings_file, settings_dict)
159
160 def __vscode_extensions_cmake(self, modified_recipe, recommendations):
161 if modified_recipe.build_tool is not BuildTool.CMAKE:
162 return
163 recommendations += [
164 "ms-vscode.cmake-tools",
165 "ms-vscode.cpptools",
166 "ms-vscode.cpptools-extension-pack",
167 "ms-vscode.cpptools-themes"
168 ]
169
170 def __vscode_extensions_meson(self, modified_recipe, recommendations):
171 if modified_recipe.build_tool is not BuildTool.MESON:
172 return
173 recommendations += [
174 'mesonbuild.mesonbuild',
175 "ms-vscode.cpptools",
176 "ms-vscode.cpptools-extension-pack",
177 "ms-vscode.cpptools-themes"
178 ]
179
180 def vscode_extensions(self, modified_recipe):
181 recommendations = []
182 self.__vscode_extensions_cmake(modified_recipe, recommendations)
183 self.__vscode_extensions_meson(modified_recipe, recommendations)
184 extensions_file = 'extensions.json'
185 IdeBase.update_json_file(
186 self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})
187
188 def vscode_c_cpp_properties(self, modified_recipe):
189 properties_dict = {
190 "name": modified_recipe.recipe_id_pretty,
191 }
192 if modified_recipe.build_tool is BuildTool.CMAKE:
193 properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
194 elif modified_recipe.build_tool is BuildTool.MESON:
195 properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
196 properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
197 else: # no C/C++ build
198 return
199
200 properties_dicts = {
201 "configurations": [
202 properties_dict
203 ],
204 "version": 4
205 }
206 prop_file = 'c_cpp_properties.json'
207 IdeBase.update_json_file(
208 self.dot_code_dir(modified_recipe), prop_file, properties_dicts)
209
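For a meson recipe, the written c_cpp_properties.json might look like the following (recipe id and compiler path are made-up examples):

example_properties = {
    "configurations": [
        {
            "name": "example: cortexa57",
            "configurationProvider": "mesonbuild.mesonbuild",
            "compilerPath": "/.../recipe-sysroot-native/usr/bin/aarch64-poky-linux/aarch64-poky-linux-g++"
        }
    ],
    "version": 4
}
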
210 def vscode_launch_bin_dbg(self, gdb_cross_config):
211 modified_recipe = gdb_cross_config.modified_recipe
212
213 launch_config = {
214 "name": gdb_cross_config.id_pretty,
215 "type": "cppdbg",
216 "request": "launch",
217 "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
218 "stopAtEntry": True,
219 "cwd": "${workspaceFolder}",
220 "environment": [],
221 "externalConsole": False,
222 "MIMode": "gdb",
223 "preLaunchTask": gdb_cross_config.id_pretty,
224 "miDebuggerPath": modified_recipe.gdb_cross.gdb,
225 "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
226 }
227
228 # Search for header files in recipe-sysroot.
229 src_file_map = {
230 "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
231 }
232 # First of all, search for unstripped binaries in the image folder.
233 # These binaries are copied (and optionally stripped) by deploy-target.
234 setup_commands = [
235 {
236 "description": "sysroot",
237 "text": "set sysroot " + modified_recipe.d
238 }
239 ]
240
241 if gdb_cross_config.image_recipe.rootfs_dbg:
242 launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
243 gdb_cross_config.image_recipe)
244 # First: Search for sources of this recipe in the workspace folder
245 if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
246 src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
247 else:
248 logger.error(
249 "TARGET_DBGSRC_DIR must contain the recipe name PN.")
250 # Second: Search for sources of other recipes in the rootfs-dbg
251 if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
252 src_file_map["/usr/src/debug"] = os.path.join(
253 gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
254 else:
255 logger.error(
256 "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
257 else:
258 logger.warning(
259 "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
260
261 launch_config['sourceFileMap'] = src_file_map
262 launch_config['setupCommands'] = setup_commands
263 return launch_config
264
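Putting the three mappings together, the resulting sourceFileMap might look like this for a recipe named example (all paths are hypothetical):

example_src_file_map = {
    "/usr/include": "/.../example/1.0/recipe-sysroot/usr/include",
    "/usr/src/debug/example/1.0": "${workspaceFolder}",
    "/usr/src/debug": "/.../core-image-minimal/1.0/rootfs-dbg/usr/src/debug"
}
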
265 def vscode_launch(self, modified_recipe):
266 """GDB Launch configuration for binaries (elf files)"""
267
268 configurations = []
269 for gdb_cross_config in self.gdb_cross_configs:
270 if gdb_cross_config.modified_recipe is modified_recipe:
271 configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
272 launch_dict = {
273 "version": "0.2.0",
274 "configurations": configurations
275 }
276 launch_file = 'launch.json'
277 IdeBase.update_json_file(
278 self.dot_code_dir(modified_recipe), launch_file, launch_dict)
279
280 def vscode_tasks_cpp(self, args, modified_recipe):
281 run_install_deploy = modified_recipe.gen_install_deploy_script(args)
282 install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
283 tasks_dict = {
284 "version": "2.0.0",
285 "tasks": [
286 {
287 "label": install_task_name,
288 "type": "shell",
289 "command": run_install_deploy,
290 "problemMatcher": []
291 }
292 ]
293 }
294 for gdb_cross_config in self.gdb_cross_configs:
295 if gdb_cross_config.modified_recipe is not modified_recipe:
296 continue
297 tasks_dict['tasks'].append(
298 {
299 "label": gdb_cross_config.id_pretty,
300 "type": "shell",
301 "isBackground": True,
302 "dependsOn": [
303 install_task_name
304 ],
305 "command": gdb_cross_config.gdbserver_script,
306 "problemMatcher": [
307 {
308 "pattern": [
309 {
310 "regexp": ".",
311 "file": 1,
312 "location": 2,
313 "message": 3
314 }
315 ],
316 "background": {
317 "activeOnStart": True,
318 "beginsPattern": ".",
319 "endsPattern": ".",
320 }
321 }
322 ]
323 })
324 tasks_file = 'tasks.json'
325 IdeBase.update_json_file(
326 self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
327
328 def vscode_tasks_fallback(self, args, modified_recipe):
329 oe_init_dir = modified_recipe.oe_init_dir
330 oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
331 dt_build = "devtool build "
332 dt_build_label = dt_build + modified_recipe.recipe_id_pretty
333 dt_build_cmd = dt_build + modified_recipe.bpn
334 clean_opt = " --clean"
335 dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
336 dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
337 dt_deploy = "devtool deploy-target "
338 dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
339 dt_deploy_cmd = dt_deploy + modified_recipe.bpn
340 dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
341 deploy_opts = ' '.join(get_devtool_deploy_opts(args))
342 tasks_dict = {
343 "version": "2.0.0",
344 "tasks": [
345 {
346 "label": dt_build_label,
347 "type": "shell",
348 "command": "bash",
349 "linux": {
350 "options": {
351 "cwd": oe_init_dir
352 }
353 },
354 "args": [
355 "--login",
356 "-c",
357 "%s%s" % (oe_init, dt_build_cmd)
358 ],
359 "problemMatcher": []
360 },
361 {
362 "label": dt_deploy_label,
363 "type": "shell",
364 "command": "bash",
365 "linux": {
366 "options": {
367 "cwd": oe_init_dir
368 }
369 },
370 "args": [
371 "--login",
372 "-c",
373 "%s%s %s" % (
374 oe_init, dt_deploy_cmd, deploy_opts)
375 ],
376 "problemMatcher": []
377 },
378 {
379 "label": dt_build_deploy_label,
380 "dependsOrder": "sequence",
381 "dependsOn": [
382 dt_build_label,
383 dt_deploy_label
384 ],
385 "problemMatcher": [],
386 "group": {
387 "kind": "build",
388 "isDefault": True
389 }
390 },
391 {
392 "label": dt_build_clean_label,
393 "type": "shell",
394 "command": "bash",
395 "linux": {
396 "options": {
397 "cwd": oe_init_dir
398 }
399 },
400 "args": [
401 "--login",
402 "-c",
403 "%s%s" % (oe_init, dt_build_clean_cmd)
404 ],
405 "problemMatcher": []
406 }
407 ]
408 }
409 if modified_recipe.gdb_cross:
410 for gdb_cross_config in self.gdb_cross_configs:
411 if gdb_cross_config.modified_recipe is not modified_recipe:
412 continue
413 tasks_dict['tasks'].append(
414 {
415 "label": gdb_cross_config.id_pretty,
416 "type": "shell",
417 "isBackground": True,
418 "dependsOn": [
419 dt_build_deploy_label
420 ],
421 "command": gdb_cross_config.gdbserver_script,
422 "problemMatcher": [
423 {
424 "pattern": [
425 {
426 "regexp": ".",
427 "file": 1,
428 "location": 2,
429 "message": 3
430 }
431 ],
432 "background": {
433 "activeOnStart": True,
434 "beginsPattern": ".",
435 "endsPattern": ".",
436 }
437 }
438 ]
439 })
440 tasks_file = 'tasks.json'
441 IdeBase.update_json_file(
442 self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
443
444 def vscode_tasks(self, args, modified_recipe):
445 if modified_recipe.build_tool.is_c_ccp:
446 self.vscode_tasks_cpp(args, modified_recipe)
447 else:
448 self.vscode_tasks_fallback(args, modified_recipe)
449
450 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
451 self.vscode_settings(modified_recipe, image_recipe)
452 self.vscode_extensions(modified_recipe)
453 self.vscode_c_cpp_properties(modified_recipe)
454 if args.target:
455 self.initialize_gdb_cross_configs(
456 image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
457 self.vscode_launch(modified_recipe)
458 self.vscode_tasks(args, modified_recipe)
459
460
461def register_ide_plugin(ide_plugins):
462 ide_plugins['code'] = IdeVSCode
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
new file mode 100644
index 0000000000..f106c5a026
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_none.py
@@ -0,0 +1,53 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk generic IDE plugin"""
7
8import os
9import logging
10from devtool.ide_plugins import IdeBase, GdbCrossConfig
11
12logger = logging.getLogger('devtool')
13
14
15class IdeNone(IdeBase):
16 """Generate some generic helpers for other IDEs
17
18 Modified recipe mode:
19 Generate some helper scripts for remote debugging with GDB
20
21 Shared sysroot mode:
22 A wrapper for bitbake meta-ide-support and bitbake build-sysroots
23 """
24
25 def __init__(self):
26 super().__init__()
27
28 def setup_shared_sysroots(self, shared_env):
29 real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
30 deploy_dir_image = shared_env.ide_support.deploy_dir_image
31 env_script = os.path.join(
32 deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
33 logger.info(
34 "To use this SDK please source this: %s" % env_script)
35
36 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
37 """generate some helper scripts and config files
38
39 - Execute the do_install task
40 - Execute devtool deploy-target
41 - Generate a gdbinit file per executable
42 - Generate the oe-scripts sym-link
43 """
44 script_path = modified_recipe.gen_install_deploy_script(args)
45 logger.info("Created: %s" % script_path)
46
47 self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
48
49 IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
50
51
52def register_ide_plugin(ide_plugins):
53 ide_plugins['none'] = IdeNone
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
new file mode 100755
index 0000000000..931408fa74
--- /dev/null
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -0,0 +1,1009 @@
1# Development tool - ide-sdk command plugin
2#
3# Copyright (C) 2023-2024 Siemens AG
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool ide-sdk plugin"""
8
9import json
10import logging
11import os
12import re
13import shutil
14import stat
15import subprocess
16import sys
17from argparse import RawTextHelpFormatter
18from enum import Enum
19
20import scriptutils
21import bb
22from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
23from devtool.standard import get_real_srctree
24from devtool.ide_plugins import BuildTool
25
26
27logger = logging.getLogger('devtool')
28
29# dict of classes derived from IdeBase
30ide_plugins = {}
31
32
33class DevtoolIdeMode(Enum):
34 """Different modes are supported by the ide-sdk plugin.
35
36 The enum might be extended by more advanced modes in the future. Some ideas:
37 - auto: modified if all recipes are modified, shared if none of the recipes is modified.
38 - mixed: modified mode for modified recipes, shared mode for all other recipes.
39 """
40
41 modified = 'modified'
42 shared = 'shared'
43
44
45class TargetDevice:
46 """SSH remote login parameters"""
47
48 def __init__(self, args):
49 self.extraoptions = ''
50 if args.no_host_check:
51 self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
52 self.ssh_sshexec = 'ssh'
53 if args.ssh_exec:
54 self.ssh_sshexec = args.ssh_exec
55 self.ssh_port = ''
56 if args.port:
57 self.ssh_port = "-p %s" % args.port
58 if args.key:
59 self.extraoptions += ' -i %s' % args.key
60
61 self.target = args.target
62 target_sp = args.target.split('@')
63 if len(target_sp) == 1:
64 self.login = ""
65 self.host = target_sp[0]
66 elif len(target_sp) == 2:
67 self.login = target_sp[0]
68 self.host = target_sp[1]
69 else:
70 logger.error("Invalid target argument: %s" % args.target)
71
72
73class RecipeNative:
74 """Base class for calling bitbake to provide a -native recipe"""
75
76 def __init__(self, name, target_arch=None):
77 self.name = name
78 self.target_arch = target_arch
79 self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
80 self.staging_bindir_native = None
81 self.target_sys = None
82 self.__native_bin = None
83
84 def _initialize(self, config, workspace, tinfoil):
85 """Get the parsed recipe"""
86 recipe_d = parse_recipe(
87 config, tinfoil, self.name, appends=True, filter_workspace=False)
88 if not recipe_d:
89 raise DevtoolError("Parsing %s recipe failed" % self.name)
90 self.staging_bindir_native = os.path.realpath(
91 recipe_d.getVar('STAGING_BINDIR_NATIVE'))
92 self.target_sys = recipe_d.getVar('TARGET_SYS')
93 return recipe_d
94
95 def initialize(self, config, workspace, tinfoil):
96 """Basic initialization that can be overridden by a derived class"""
97 self._initialize(config, workspace, tinfoil)
98
99 @property
100 def native_bin(self):
101 if not self.__native_bin:
102 raise DevtoolError("native binary name is not defined.")
103 return self.__native_bin
104
105
106class RecipeGdbCross(RecipeNative):
107 """Handle handle gdb-cross on the host and the gdbserver on the target device"""
108
109 def __init__(self, args, target_arch, target_device):
110 super().__init__('gdb-cross-' + target_arch, target_arch)
111 self.target_device = target_device
112 self.gdb = None
113 self.gdbserver_port_next = int(args.gdbserver_port_start)
114 self.config_db = {}
115
116 def __find_gdbserver(self, config, tinfoil):
117 """Absolute path of the gdbserver"""
118 recipe_d_gdb = parse_recipe(
119 config, tinfoil, 'gdb', appends=True, filter_workspace=False)
120 if not recipe_d_gdb:
121 raise DevtoolError("Parsing gdb recipe failed")
122 return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')
123
124 def initialize(self, config, workspace, tinfoil):
125 super()._initialize(config, workspace, tinfoil)
126 gdb_bin = self.target_sys + '-gdb'
127 gdb_path = os.path.join(
128 self.staging_bindir_native, self.target_sys, gdb_bin)
129 self.gdb = gdb_path
130 self.gdbserver_path = self.__find_gdbserver(config, tinfoil)
131
132 @property
133 def host(self):
134 return self.target_device.host
135
136
137class RecipeImage:
138 """Handle some image recipe related properties
139
140 Most workflows require firmware that runs on the target device.
141 This firmware must be consistent with the setup of the host system.
142 In particular, the debug symbols must be compatible. For this, the
143 rootfs must be created as part of the SDK.
144 """
145
146 def __init__(self, name):
147 self.combine_dbg_image = False
148 self.gdbserver_missing = False
149 self.name = name
150 self.rootfs = None
151 self.__rootfs_dbg = None
152 self.bootstrap_tasks = [self.name + ':do_build']
153
154 def initialize(self, config, tinfoil):
155 image_d = parse_recipe(
156 config, tinfoil, self.name, appends=True, filter_workspace=False)
157 if not image_d:
158 raise DevtoolError(
159 "Parsing image recipe %s failed" % self.name)
160
161 self.combine_dbg_image = bb.data.inherits_class(
162 'image-combined-dbg', image_d)
163
164 workdir = image_d.getVar('WORKDIR')
165 self.rootfs = os.path.join(workdir, 'rootfs')
166 if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
167 self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
168
169 self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
170 'IMAGE_INSTALL') and 'tools-debug' not in image_d.getVar('IMAGE_FEATURES')
171
172 @property
173 def debug_support(self):
174 return bool(self.rootfs_dbg)
175
176 @property
177 def rootfs_dbg(self):
178 if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
179 return self.__rootfs_dbg
180 return None
181
182
183class RecipeMetaIdeSupport:
184 """For the shared sysroots mode meta-ide-support is needed
185
186 For use cases where just a cross tool-chain is required but
187 no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
188 and bitbake build-sysroots. This also makes it possible to expose the cross-toolchains
189 to IDEs. For example, VSCode supports different tool-chains via cmake-kits.
190 """
191
192 def __init__(self):
193 self.bootstrap_tasks = ['meta-ide-support:do_build']
194 self.topdir = None
195 self.datadir = None
196 self.deploy_dir_image = None
197 self.build_sys = None
198 # From toolchain-scripts
199 self.real_multimach_target_sys = None
200
201 def initialize(self, config, tinfoil):
202 meta_ide_support_d = parse_recipe(
203 config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
204 if not meta_ide_support_d:
205 raise DevtoolError("Parsing meta-ide-support recipe failed")
206
207 self.topdir = meta_ide_support_d.getVar('TOPDIR')
208 self.datadir = meta_ide_support_d.getVar('datadir')
209 self.deploy_dir_image = meta_ide_support_d.getVar(
210 'DEPLOY_DIR_IMAGE')
211 self.build_sys = meta_ide_support_d.getVar('BUILD_SYS')
212 self.real_multimach_target_sys = meta_ide_support_d.getVar(
213 'REAL_MULTIMACH_TARGET_SYS')
214
215
216class RecipeBuildSysroots:
217 """For the shared sysroots mode build-sysroots is needed"""
218
219 def __init__(self):
220 self.standalone_sysroot = None
221 self.standalone_sysroot_native = None
222 self.bootstrap_tasks = [
223 'build-sysroots:do_build_target_sysroot',
224 'build-sysroots:do_build_native_sysroot'
225 ]
226
227 def initialize(self, config, tinfoil):
228 build_sysroots_d = parse_recipe(
229 config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
230 if not build_sysroots_d:
231 raise DevtoolError("Parsing build-sysroots recipe failed")
232 self.standalone_sysroot = build_sysroots_d.getVar(
233 'STANDALONE_SYSROOT')
234 self.standalone_sysroot_native = build_sysroots_d.getVar(
235 'STANDALONE_SYSROOT_NATIVE')
236
237
238class SharedSysrootsEnv:
239 """Handle the shared sysroots based workflow
240
241 Support the workflow with just a tool-chain without a recipe.
242 It's basically like:
243 bitbake some-dependencies
244 bitbake meta-ide-support
245 bitbake build-sysroots
246 Use the environment-* file found in the deploy folder
247 """
248
249 def __init__(self):
250 self.ide_support = None
251 self.build_sysroots = None
252
253 def initialize(self, ide_support, build_sysroots):
254 self.ide_support = ide_support
255 self.build_sysroots = build_sysroots
256
257 def setup_ide(self, ide):
258 ide.setup(self)
259
260
261class RecipeNotModified:
262 """Handling of recipes added to the Direct DSK shared sysroots."""
263
264 def __init__(self, name):
265 self.name = name
266 self.bootstrap_tasks = [name + ':do_populate_sysroot']
267
268
269class RecipeModified:
270 """Handling of recipes in the workspace created by devtool modify"""
271 OE_INIT_BUILD_ENV = 'oe-init-build-env'
272
273 VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
274
275 def __init__(self, name):
276 self.name = name
277 self.bootstrap_tasks = [name + ':do_install']
278 self.gdb_cross = None
279 # workspace
280 self.real_srctree = None
281 self.srctree = None
282 self.ide_sdk_dir = None
283 self.ide_sdk_scripts_dir = None
284 self.bbappend = None
285 # recipe variables from d.getVar
286 self.b = None
287 self.base_libdir = None
288 self.bblayers = None
289 self.bpn = None
290 self.d = None
291 self.debug_build = None
292 self.fakerootcmd = None
293 self.fakerootenv = None
294 self.libdir = None
295 self.max_process = None
296 self.package_arch = None
297 self.package_debug_split_style = None
298 self.path = None
299 self.pn = None
300 self.recipe_sysroot = None
301 self.recipe_sysroot_native = None
302 self.staging_incdir = None
303 self.strip_cmd = None
304 self.target_arch = None
305 self.target_dbgsrc_dir = None
306 self.topdir = None
307 self.workdir = None
308 self.recipe_id = None
309 # replicate bitbake build environment
310 self.exported_vars = None
311 self.cmd_compile = None
312 self.__oe_init_dir = None
313 # main build tool used by this recipe
314 self.build_tool = BuildTool.UNDEFINED
315 # build_tool = cmake
316 self.oecmake_generator = None
317 self.cmake_cache_vars = None
318 # build_tool = meson
319 self.meson_buildtype = None
320 self.meson_wrapper = None
321 self.mesonopts = None
322 self.extra_oemeson = None
323 self.meson_cross_file = None
324
325 def initialize(self, config, workspace, tinfoil):
326 recipe_d = parse_recipe(
327 config, tinfoil, self.name, appends=True, filter_workspace=False)
328 if not recipe_d:
329 raise DevtoolError("Parsing %s recipe failed" % self.name)
330
331 # Verify this recipe is built as externalsrc setup by devtool modify
332 workspacepn = check_workspace_recipe(
333 workspace, self.name, bbclassextend=True)
334 self.srctree = workspace[workspacepn]['srctree']
335 # Need to grab this here in case the source is within a subdirectory
336 self.real_srctree = get_real_srctree(
337 self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR'))
338 self.bbappend = workspace[workspacepn]['bbappend']
339
340 self.ide_sdk_dir = os.path.join(
341 config.workspace_path, 'ide-sdk', self.name)
342 if os.path.exists(self.ide_sdk_dir):
343 shutil.rmtree(self.ide_sdk_dir)
344 self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')
345
346 self.b = recipe_d.getVar('B')
347 self.base_libdir = recipe_d.getVar('base_libdir')
348 self.bblayers = recipe_d.getVar('BBLAYERS').split()
349 self.bpn = recipe_d.getVar('BPN')
350 self.cxx = recipe_d.getVar('CXX')
351 self.d = recipe_d.getVar('D')
352 self.debug_build = recipe_d.getVar('DEBUG_BUILD')
353 self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
354 self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
355 self.libdir = recipe_d.getVar('libdir')
356 self.max_process = int(recipe_d.getVar(
357 "BB_NUMBER_THREADS") or os.cpu_count() or 1)
358 self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
359 self.package_debug_split_style = recipe_d.getVar(
360 'PACKAGE_DEBUG_SPLIT_STYLE')
361 self.path = recipe_d.getVar('PATH')
362 self.pn = recipe_d.getVar('PN')
363 self.recipe_sysroot = os.path.realpath(
364 recipe_d.getVar('RECIPE_SYSROOT'))
365 self.recipe_sysroot_native = os.path.realpath(
366 recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
367 self.staging_bindir_toolchain = os.path.realpath(
368 recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
369 self.staging_incdir = os.path.realpath(
370 recipe_d.getVar('STAGING_INCDIR'))
371 self.strip_cmd = recipe_d.getVar('STRIP')
372 self.target_arch = recipe_d.getVar('TARGET_ARCH')
373 self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
374 self.topdir = recipe_d.getVar('TOPDIR')
375 self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))
376
377 self.__init_exported_variables(recipe_d)
378
379 if bb.data.inherits_class('cmake', recipe_d):
380 self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
381 self.__init_cmake_preset_cache(recipe_d)
382 self.build_tool = BuildTool.CMAKE
383 elif bb.data.inherits_class('meson', recipe_d):
384 self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
385 self.mesonopts = recipe_d.getVar('MESONOPTS')
386 self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
387 self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
388 self.build_tool = BuildTool.MESON
389
390 # Recipe ID is the identifier for IDE config sections
391 self.recipe_id = self.bpn + "-" + self.package_arch
392 self.recipe_id_pretty = self.bpn + ": " + self.package_arch
393
394 @staticmethod
395 def is_valid_shell_variable(var):
396 """Skip strange shell variables like systemd
397
398 prevent from strange bugs because of strange variables which
399 are not used in this context but break various tools.
400 """
401 if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
402 bb.debug(1, "ignoring variable: %s" % var)
403 return True
404 return False
405
406 def solib_search_path(self, image):
407 """Search for debug symbols in the rootfs and rootfs-dbg
408
409 The debug symbols of shared libraries which are provided by other packages
410 are grabbed from the -dbg packages in the rootfs-dbg.
411
412 But most cross debugging tools like gdb, perf, and systemtap need to find the
413 executable/library first and then locate the corresponding symbols file through
414 its debuglink note. Therefore the library paths from the rootfs are added as well.
415
416 Note: For the devtool modified recipe compiled from the IDE, the debug
417 symbols are taken from the unstripped binaries in the image folder.
418 Also, devtool deploy-target takes the files from the image folder.
419 debug symbols in the image folder refer to the corresponding source files
420 with absolute paths of the build machine. Debug symbols found in the
421 rootfs-dbg are relocated and contain paths which refer to the source files
422 installed on the target device e.g. /usr/src/...
423 """
424 base_libdir = self.base_libdir.lstrip('/')
425 libdir = self.libdir.lstrip('/')
426 so_paths = [
427 # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
428 os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
429 os.path.join(image.rootfs_dbg, libdir, ".debug"),
430 # debug symbols for package_debug_split_style: debug-file-directory
431 os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
432
433 # The binaries are required as well, the debug packages are not enough
434 # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
435 os.path.join(image.rootfs_dbg, base_libdir),
436 os.path.join(image.rootfs_dbg, libdir),
437 # Without image-combined-dbg.bbclass the binaries are only in rootfs.
438 # Note: Stepping into source files located in rootfs-dbg does not
439 # work without image-combined-dbg.bbclass yet.
440 os.path.join(image.rootfs, base_libdir),
441 os.path.join(image.rootfs, libdir)
442 ]
443 return so_paths
444
445 def solib_search_path_str(self, image):
446 """Return a : separated list of paths usable by GDB's set solib-search-path"""
447 return ':'.join(self.solib_search_path(image))
448
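An illustration of what the joined string feeds into (paths are hypothetical):

# search_path = recipe.solib_search_path_str(image)
# => "/.../rootfs-dbg/lib/.debug:/.../rootfs-dbg/usr/lib/.debug:/.../rootfs-dbg/usr/lib/debug:..."
# Consumers hand it to GDB as:  set solib-search-path <search_path>
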
449 def __init_exported_variables(self, d):
450 """Find all variables with export flag set.
451
452 This allows generating IDE configurations which compile with the same
453 environment as bitbake does. That's at least a reasonable default behavior.
454 """
455 exported_vars = {}
456
457 vars = (key for key in d.keys() if not key.startswith(
458 "__") and not d.getVarFlag(key, "func", False))
459 for var in sorted(vars):
460 func = d.getVarFlag(var, "func", False)
461 if d.getVarFlag(var, 'python', False) and func:
462 continue
463 export = d.getVarFlag(var, "export", False)
464 unexport = d.getVarFlag(var, "unexport", False)
465 if not export and not unexport and not func:
466 continue
467 if unexport:
468 continue
469
470 val = d.getVar(var)
471 if val is None:
472 continue
473 if set(var) & set("-.{}+"):
474 logger.warning(
475 "Found invalid character in variable name %s", str(var))
476 continue
477 varExpanded = d.expand(var)
478 val = str(val)
479
480 if not RecipeModified.is_valid_shell_variable(varExpanded):
481 continue
482
483 if func:
484 code_line = "line: {0}, file: {1}\n".format(
485 d.getVarFlag(var, "lineno", False),
486 d.getVarFlag(var, "filename", False))
487 val = val.rstrip('\n')
488 logger.warn("Warning: exported shell function %s() is not exported (%s)" %
489 (varExpanded, code_line))
490 continue
491
492 if export:
493 exported_vars[varExpanded] = val.strip()
494 continue
495
496 self.exported_vars = exported_vars
497
498 def __init_cmake_preset_cache(self, d):
499 """Get the arguments passed to cmake
500
501 Replicate the cmake configure arguments with all details so that
502 the build folder can be shared between bitbake and the SDK.
503 """
504 site_file = os.path.join(self.workdir, 'site-file.cmake')
505 if os.path.exists(site_file):
506 print("Warning: site-file.cmake is not supported")
507
508 cache_vars = {}
509 oecmake_args = d.getVar('OECMAKE_ARGS').split()
510 extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
511 for param in sorted(oecmake_args + extra_oecmake):
512 d_pref = "-D"
513 if param.startswith(d_pref):
514 param = param[len(d_pref):]
515 else:
516 print("Error: expected a -D")
517 param_s = param.split('=', 1)
518 param_nt = param_s[0].split(':', 1)
519
520 def handle_undefined_variable(var):
521 if var.startswith('${') and var.endswith('}'):
522 return ''
523 else:
524 return var
525 # Example: FOO=ON
526 if len(param_nt) == 1:
527 cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
528 # Example: FOO:PATH=/tmp
529 elif len(param_nt) == 2:
530 cache_vars[param_nt[0]] = {
531 "type": param_nt[1],
532 "value": handle_undefined_variable(param_s[1]),
533 }
534 else:
535 print("Error: cannot parse %s" % param)
536 self.cmake_cache_vars = cache_vars
537
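The mapping performed above can be illustrated with made-up parameters:

# "-DFOO=ON"                         -> cache_vars["FOO"] = "ON"
# "-DCMAKE_INSTALL_PREFIX:PATH=/usr" -> cache_vars["CMAKE_INSTALL_PREFIX"] = {"type": "PATH", "value": "/usr"}
# "-DBAR=${UNDEFINED}"               -> cache_vars["BAR"] = ""  (unexpanded bitbake variables collapse to "")
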
538 def cmake_preset(self):
539 """Create a preset for cmake that mimics how bitbake calls cmake"""
540 toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
541 cmake_executable = os.path.join(
542 self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
543 self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id
544
545 preset_dict_configure = {
546 "name": self.recipe_id,
547 "displayName": self.recipe_id_pretty,
548 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
549 "binaryDir": self.b,
550 "generator": self.oecmake_generator,
551 "toolchainFile": toolchain_file,
552 "cacheVariables": self.cmake_cache_vars,
553 "environment": self.exported_vars,
554 "cmakeExecutable": cmake_executable
555 }
556
557 preset_dict_build = {
558 "name": self.recipe_id,
559 "displayName": self.recipe_id_pretty,
560 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
561 "configurePreset": self.recipe_id,
562 "inheritConfigureEnvironment": True
563 }
564
565 preset_dict_test = {
566 "name": self.recipe_id,
567 "displayName": self.recipe_id_pretty,
568 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
569 "configurePreset": self.recipe_id,
570 "inheritConfigureEnvironment": True
571 }
572
573 preset_dict = {
574 "version": 3, # cmake 3.21, backward compatible with kirkstone
575 "configurePresets": [preset_dict_configure],
576 "buildPresets": [preset_dict_build],
577 "testPresets": [preset_dict_test]
578 }
579
580 # Finally write the json file
581 json_file = 'CMakeUserPresets.json'
582 json_path = os.path.join(self.real_srctree, json_file)
583 logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
584 if not os.path.exists(self.real_srctree):
585 os.makedirs(self.real_srctree)
586 try:
587 with open(json_path) as f:
588 orig_dict = json.load(f)
589 except json.decoder.JSONDecodeError:
590 logger.info(
591 "Decoding %s failed, probably because of comments in the json file" % json_path)
592 orig_dict = {}
593 except FileNotFoundError:
594 orig_dict = {}
595
596 # Add or update the presets for the recipe and keep other presets
597 for k, v in preset_dict.items():
598 if isinstance(v, list):
599 update_preset = v[0]
600 preset_added = False
601 if k in orig_dict:
602 for index, orig_preset in enumerate(orig_dict[k]):
603 if 'name' in orig_preset:
604 if orig_preset['name'] == update_preset['name']:
605 logger.debug("Updating preset: %s" %
606 orig_preset['name'])
607 orig_dict[k][index] = update_preset
608 preset_added = True
609 break
610 else:
611 logger.debug("keeping preset: %s" %
612 orig_preset['name'])
613 else:
614 logger.warn("preset without a name found")
615 if not preset_added:
616 if k not in orig_dict:
617 orig_dict[k] = []
618 orig_dict[k].append(update_preset)
619 logger.debug("Added preset: %s" %
620 update_preset['name'])
621 else:
622 orig_dict[k] = v
623
624 with open(json_path, 'w') as f:
625 json.dump(orig_dict, f, indent=4)
626
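With CMakeUserPresets.json written, the bitbake-equivalent configure and build steps can be reproduced from the source tree. A minimal sketch; the source tree path and recipe id below are hypothetical (the real id is "<BPN>-<PACKAGE_ARCH>" as set in initialize()):

import subprocess

srctree = "/path/to/workspace/sources/example"  # assumption: directory containing CMakeUserPresets.json
recipe_id = "example-cortexa57"                 # assumption: recipe id of the modified recipe
subprocess.run(["cmake", "--preset", recipe_id], cwd=srctree, check=True)             # configure
subprocess.run(["cmake", "--build", "--preset", recipe_id], cwd=srctree, check=True)  # compile

The cmakeExecutable field in the preset is a hint for IDEs such as cmake-tools; on the command line, cmake would typically be invoked from the recipe-sysroot-native path shown above.
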
627 def gen_meson_wrapper(self):
628 """Generate a wrapper script to call meson with the cross environment"""
629 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
630 meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
631 meson_real = os.path.join(
632 self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
633 with open(meson_wrapper, 'w') as mwrap:
634 mwrap.write("#!/bin/sh" + os.linesep)
635 for var, val in self.exported_vars.items():
636 mwrap.write('export %s="%s"' % (var, val) + os.linesep)
637 mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
638 private_temp = os.path.join(self.b, "meson-private", "tmp")
639 mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
640 mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
641 mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
642 st = os.stat(meson_wrapper)
643 os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
644 self.meson_wrapper = meson_wrapper
645 self.cmd_compile = meson_wrapper + " compile -C " + self.b
646
647 def which(self, executable):
648 bin_path = shutil.which(executable, path=self.path)
649 if not bin_path:
650 raise DevtoolError(
651 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
652 return bin_path
653
654 @staticmethod
655 def is_elf_file(file_path):
656 with open(file_path, "rb") as f:
657 data = f.read(4)
658 if data == b'\x7fELF':
659 return True
660 return False
661
662 def find_installed_binaries(self):
663 """find all executable elf files in the image directory"""
664 binaries = []
665 d_len = len(self.d)
666 re_so = re.compile(r'.*\.so[.0-9]*$')
667 for root, _, files in os.walk(self.d, followlinks=False):
668 for file in files:
669 if os.path.islink(file):
670 continue
671 if re_so.match(file):
672 continue
673 abs_name = os.path.join(root, file)
674 if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
675 binaries.append(abs_name[d_len:])
676 return sorted(binaries)
677
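A hedged usage sketch (recipe name hypothetical; config, workspace and tinfoil as in ide_setup() below):

recipe = RecipeModified('example')
recipe.initialize(config, workspace, tinfoil)
for binary in recipe.find_installed_binaries():
    print(binary)  # e.g. "/usr/bin/example" -- paths are relative to the image directory D

This is the list used to create one GDB launch configuration per installed executable.
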
678 def gen_deploy_target_script(self, args):
679 """Generate a script which does what devtool deploy-target does
680
681 This script is much quicker than devtool deploy-target because it
682 does not need to start a bitbake server. All information from tinfoil
683 is hard-coded in the generated script.
684 """
685 cmd_lines = ['#!%s' % str(sys.executable)]
686 cmd_lines.append('import sys')
687 cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
688 cmd_lines.append('devtool_sys_path.reverse()')
689 cmd_lines.append('for p in devtool_sys_path:')
690 cmd_lines.append(' if p not in sys.path:')
691 cmd_lines.append(' sys.path.insert(0, p)')
692 cmd_lines.append('from devtool.deploy import deploy_no_d')
693 args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
694 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
695 filtered_args_dict = {key: value for key, value in vars(
696 args).items() if key in args_filter}
697 cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
698 cmd_lines.append('class Dict2Class(object):')
699 cmd_lines.append(' def __init__(self, my_dict):')
700 cmd_lines.append(' for key in my_dict:')
701 cmd_lines.append(' setattr(self, key, my_dict[key])')
702 cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
703 cmd_lines.append(
704 'setattr(filtered_args, "recipename", "%s")' % self.bpn)
705 cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
706 (self.d, self.workdir, self.path, self.strip_cmd,
707 self.libdir, self.base_libdir, self.max_process,
708 self.fakerootcmd, self.fakerootenv))
709 return self.write_script(cmd_lines, 'deploy_target')
710
711 def gen_install_deploy_script(self, args):
712 """Generate a script which does install and deploy"""
713 cmd_lines = ['#!/bin/bash']
714
715 # . oe-init-build-env $BUILDDIR
716 # Note: Sourcing scripts with arguments requires bash
717 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
718 self.oe_init_dir, self.oe_init_dir))
719 cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
720 self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
721
722 # bitbake -c install
723 cmd_lines.append(
724 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
725
726 # Self contained devtool deploy-target
727 cmd_lines.append(self.gen_deploy_target_script(args))
728
729 return self.write_script(cmd_lines, 'install_and_deploy')
730
731 def write_script(self, cmd_lines, script_name):
732 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
733 script_name_arch = script_name + '_' + self.recipe_id
734 script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
735 with open(script_file, 'w') as script_f:
736 script_f.write(os.linesep.join(cmd_lines))
737 st = os.stat(script_file)
738 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
739 return script_file
740
741 @property
742 def oe_init_build_env(self):
743 """Find the oe-init-build-env used for this setup"""
744 oe_init_dir = self.oe_init_dir
745 if oe_init_dir:
746 return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
747 return None
748
749 @property
750 def oe_init_dir(self):
751 """Find the directory where the oe-init-build-env is located
752
753 Assumption: There might be a layer with higher priority than poky
754 which provides oe-init-build-env in the layer's toplevel folder.
755 """
756 if not self.__oe_init_dir:
757 for layer in reversed(self.bblayers):
758 result = subprocess.run(
759 ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
760 if result.returncode == 0:
761 oe_init_dir = result.stdout.decode('utf-8').strip()
762 oe_init_path = os.path.join(
763 oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
764 if os.path.exists(oe_init_path):
765 logger.debug("Using %s from: %s" % (
766 RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
767 self.__oe_init_dir = oe_init_dir
768 break
769 if not self.__oe_init_dir:
770 logger.error("Cannot find the bitbake top level folder")
771 return self.__oe_init_dir
772
773
774def ide_setup(args, config, basepath, workspace):
775 """Generate the IDE configuration for the workspace"""
776
777 # Explicitly passing some special recipes does not make sense
778 for recipe in args.recipenames:
779 if recipe in ['meta-ide-support', 'build-sysroots']:
780 raise DevtoolError("Invalid recipe: %s." % recipe)
781
782 # Collect information about tasks which need to be bitbaked
783 bootstrap_tasks = []
784 bootstrap_tasks_late = []
785 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
786 try:
787 # define mode depending on recipes which need to be processed
788 recipes_image_names = []
789 recipes_modified_names = []
790 recipes_other_names = []
791 for recipe in args.recipenames:
792 try:
793 check_workspace_recipe(
794 workspace, recipe, bbclassextend=True)
795 recipes_modified_names.append(recipe)
796 except DevtoolError:
797 recipe_d = parse_recipe(
798 config, tinfoil, recipe, appends=True, filter_workspace=False)
799 if not recipe_d:
800 raise DevtoolError("Parsing recipe %s failed" % recipe)
801 if bb.data.inherits_class('image', recipe_d):
802 recipes_image_names.append(recipe)
803 else:
804 recipes_other_names.append(recipe)
805
806 invalid_params = False
807 if args.mode == DevtoolIdeMode.shared:
808 if len(recipes_modified_names):
809 logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
810 recipes_modified_names))
811 invalid_params = True
812 if args.mode == DevtoolIdeMode.modified:
813 if len(recipes_other_names):
814 logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
815 recipes_other_names))
816 invalid_params = True
817 if len(recipes_image_names) != 1:
818 logger.error(
819 "One image recipe is required as the rootfs for the remote development.")
820 invalid_params = True
821 for modified_recipe_name in recipes_modified_names:
822 if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
823 logger.error(
824 "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name)
825 invalid_params = True
826
827 if invalid_params:
828 raise DevtoolError("Invalid parameters are passed.")
829
830 # For the shared sysroots mode, add all dependencies of all the images to the sysroots
831 # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
832 recipes_images = []
833 for recipes_image_name in recipes_image_names:
834 logger.info("Using image: %s" % recipes_image_name)
835 recipe_image = RecipeImage(recipes_image_name)
836 recipe_image.initialize(config, tinfoil)
837 bootstrap_tasks += recipe_image.bootstrap_tasks
838 recipes_images.append(recipe_image)
839
840 # Provide a Direct SDK with shared sysroots
841 recipes_not_modified = []
842 if args.mode == DevtoolIdeMode.shared:
843 ide_support = RecipeMetaIdeSupport()
844 ide_support.initialize(config, tinfoil)
845 bootstrap_tasks += ide_support.bootstrap_tasks
846
847 logger.info("Adding %s to the Direct SDK sysroots." %
848 str(recipes_other_names))
849 for recipe_name in recipes_other_names:
850 recipe_not_modified = RecipeNotModified(recipe_name)
851 bootstrap_tasks += recipe_not_modified.bootstrap_tasks
852 recipes_not_modified.append(recipe_not_modified)
853
854 build_sysroots = RecipeBuildSysroots()
855 build_sysroots.initialize(config, tinfoil)
856 bootstrap_tasks_late += build_sysroots.bootstrap_tasks
857 shared_env = SharedSysrootsEnv()
858 shared_env.initialize(ide_support, build_sysroots)
859
860 recipes_modified = []
861 if args.mode == DevtoolIdeMode.modified:
862 logger.info("Setting up workspaces for modified recipe: %s" %
863 str(recipes_modified_names))
864 gdbs_cross = {}
865 for recipe_name in recipes_modified_names:
866 recipe_modified = RecipeModified(recipe_name)
867 recipe_modified.initialize(config, workspace, tinfoil)
868 bootstrap_tasks += recipe_modified.bootstrap_tasks
869 recipes_modified.append(recipe_modified)
870
871 if recipe_modified.target_arch not in gdbs_cross:
872 target_device = TargetDevice(args)
873 gdb_cross = RecipeGdbCross(
874 args, recipe_modified.target_arch, target_device)
875 gdb_cross.initialize(config, workspace, tinfoil)
876 bootstrap_tasks += gdb_cross.bootstrap_tasks
877 gdbs_cross[recipe_modified.target_arch] = gdb_cross
878 recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]
879
880 finally:
881 tinfoil.shutdown()
882
883 if not args.skip_bitbake:
884 bb_cmd = 'bitbake '
885 if args.bitbake_k:
886 bb_cmd += "-k "
887 bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
888 exec_build_env_command(
889 config.init_path, basepath, bb_cmd_early, watch=True)
890 if bootstrap_tasks_late:
891 bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
892 exec_build_env_command(
893 config.init_path, basepath, bb_cmd_late, watch=True)
894
895 for recipe_image in recipes_images:
896 if recipe_image.gdbserver_missing:
897 logger.warning(
898 "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image.name)
899
900 if recipe_image.combine_dbg_image is False:
901 logger.warning(
902 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)
903
904 # Instantiate the active IDE plugin
905 ide = ide_plugins[args.ide]()
906 if args.mode == DevtoolIdeMode.shared:
907 ide.setup_shared_sysroots(shared_env)
908 elif args.mode == DevtoolIdeMode.modified:
909 for recipe_modified in recipes_modified:
910 if recipe_modified.build_tool is BuildTool.CMAKE:
911 recipe_modified.cmake_preset()
912 if recipe_modified.build_tool is BuildTool.MESON:
913 recipe_modified.gen_meson_wrapper()
914 ide.setup_modified_recipe(
915 args, recipe_image, recipe_modified)
916
917 if recipe_modified.debug_build != '1':
918 logger.warning(
919 'Recipe %s is compiled with release build configuration. '
920 'You might want to add DEBUG_BUILD = "1" to %s. '
921 'Note that devtool modify --debug-build can do this automatically.',
922 recipe_modified.name, recipe_modified.bbappend)
923 else:
924 raise DevtoolError("Must not end up here.")
925
926
927def register_commands(subparsers, context):
928 """Register devtool subcommands from this plugin"""
929
930 # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
931 # configuration is generated. In the case of the eSDK, the bootstrapping is performed
932 # during the installation of the eSDK installer. Running the ide-sdk plugin from an
933 # eSDK installer-based setup would require skipping the bootstrapping and probably
934 # taking some other differences into account when generating the IDE configurations.
935 # This would be possible, but it is not implemented.
936 if context.fixed_setup:
937 return
938
939 global ide_plugins
940
941 # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
942 pluginpaths = [os.path.join(path, 'ide_plugins')
943 for path in context.pluginpaths]
944 ide_plugin_modules = []
945 for pluginpath in pluginpaths:
946 scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)
947
948 for ide_plugin_module in ide_plugin_modules:
949 if hasattr(ide_plugin_module, 'register_ide_plugin'):
950 ide_plugin_module.register_ide_plugin(ide_plugins)
951 # Sort plugins according to their priority. The first entry is the default IDE plugin.
952 ide_plugins = dict(sorted(ide_plugins.items(),
953 key=lambda p: p[1].ide_plugin_priority(), reverse=True))
954
955 parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
956 help='Setup the SDK and configure the IDE')
957 parser_ide_sdk.add_argument(
958 'recipenames', nargs='+', help='Generate an IDE configuration suitable for working on the given recipes.\n'
959 'Depending on the --mode parameter, different types of SDKs and IDE configurations are generated.')
960 parser_ide_sdk.add_argument(
961 '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
962 help='Different SDK types are supported:\n'
963 '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
964 ' devtool modify creates a workspace to work on the source code of a recipe.\n'
965 ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directories\n'
966 ' Usage example:\n'
967 ' devtool modify cmake-example\n'
968 ' devtool ide-sdk cmake-example core-image-minimal\n'
969 ' Start the IDE in the workspace folder\n'
970 ' At least one devtool modified recipe plus one image recipe are required:\n'
971 ' The image recipe is used to generate the target image and the remote debug configuration.\n'
972 '- "' + DevtoolIdeMode.shared.name + '":\n'
973 ' Usage example:\n'
974 ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
975 ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
976 ' To use this tool-chain, the environment-* file found in the DEPLOY_DIR_IMAGE folder needs to be sourced into a shell.\n'
977 ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
978 default_ide = list(ide_plugins.keys())[0]
979 parser_ide_sdk.add_argument(
980 '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
981 help='Setup the configuration for this IDE (default: %s)' % default_ide)
982 parser_ide_sdk.add_argument(
983 '-t', '--target', default='root@192.168.7.2',
984 help='Live target machine running an ssh server: user@hostname.')
985 parser_ide_sdk.add_argument(
986 '-G', '--gdbserver-port-start', default="1234", help='port where gdbserver is listening.')
987 parser_ide_sdk.add_argument(
988 '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
989 parser_ide_sdk.add_argument(
990 '-e', '--ssh-exec', help='Executable to use in place of ssh')
991 parser_ide_sdk.add_argument(
992 '-P', '--port', help='Specify ssh port to use for connection to the target')
993 parser_ide_sdk.add_argument(
994 '-I', '--key', help='Specify ssh private key for connection to the target')
995 parser_ide_sdk.add_argument(
996 '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
997 parser_ide_sdk.add_argument(
998 '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
999 parser_ide_sdk.add_argument(
1000 '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
1001 parser_ide_sdk.add_argument(
1002 '-n', '--dry-run', help='List files to be undeployed only', action='store_true')
1003 parser_ide_sdk.add_argument(
1004 '-s', '--show-status', help='Show progress/status output', action='store_true')
1005 parser_ide_sdk.add_argument(
1006 '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
1007 parser_ide_sdk.add_argument(
1008 '--no-check-space', help='Do not check for available space before deploying', action='store_true')
1009 parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
index 95384c5333..1054960551 100644
--- a/scripts/lib/devtool/menuconfig.py
+++ b/scripts/lib/devtool/menuconfig.py
@@ -3,6 +3,8 @@
 # Copyright (C) 2018 Xilinx
 # Written by: Chandana Kalluri <ckalluri@xilinx.com>
 #
+# SPDX-License-Identifier: MIT
+#
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
 # published by the Free Software Foundation.
@@ -21,9 +23,6 @@
 import os
 import bb
 import logging
-import argparse
-import re
-import glob
 from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
 from devtool import check_workspace_recipe
 logger = logging.getLogger('devtool')
@@ -32,7 +31,6 @@ def menuconfig(args, config, basepath, workspace):
     """Entry point for the devtool 'menuconfig' subcommand"""
 
     rd = ""
-    kconfigpath = ""
     pn_src = ""
     localfilesdir = ""
     workspace_dir = ""
@@ -43,13 +41,12 @@ def menuconfig(args, config, basepath, workspace):
         return 1
 
     check_workspace_recipe(workspace, args.component)
-    pn = rd.getVar('PN', True)
+    pn = rd.getVar('PN')
 
     if not rd.getVarFlag('do_menuconfig','task'):
         raise DevtoolError("This recipe does not support menuconfig option")
 
     workspace_dir = os.path.join(config.workspace_path,'sources')
-    kconfigpath = rd.getVar('B')
     pn_src = os.path.join(workspace_dir,pn)
 
     # add check to see if oe_local_files exists or not
@@ -68,7 +65,7 @@ def menuconfig(args, config, basepath, workspace):
     logger.info('Launching menuconfig')
     exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
     fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
-    res = standard._create_kconfig_diff(pn_src,rd,fragment)
+    standard._create_kconfig_diff(pn_src,rd,fragment)
 
     return 0
 
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
index ae3fc4caf9..9aefd7e354 100644
--- a/scripts/lib/devtool/sdk.py
+++ b/scripts/lib/devtool/sdk.py
@@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace):
         if not sstate_mirrors:
             with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                 f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
-                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
+                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
     finally:
         shutil.rmtree(tmpsdk_dir)
 
@@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace):
         return 2
 
     try:
-        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True)
+        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
+        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
     except bb.process.ExecutionError as e:
         raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
 
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 01fb5ad96f..1fd5947c41 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -18,11 +18,13 @@ import argparse_oe
18import scriptutils 18import scriptutils
19import errno 19import errno
20import glob 20import glob
21import filecmp
22from collections import OrderedDict 21from collections import OrderedDict
22
23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError 23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
24from devtool import parse_recipe 24from devtool import parse_recipe
25 25
26import bb.utils
27
26logger = logging.getLogger('devtool') 28logger = logging.getLogger('devtool')
27 29
28override_branch_prefix = 'devtool-override-' 30override_branch_prefix = 'devtool-override-'
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-'
30 32
31def add(args, config, basepath, workspace): 33def add(args, config, basepath, workspace):
32 """Entry point for the devtool 'add' subcommand""" 34 """Entry point for the devtool 'add' subcommand"""
33 import bb 35 import bb.data
36 import bb.process
34 import oe.recipeutils 37 import oe.recipeutils
35 38
36 if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: 39 if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
@@ -147,6 +150,8 @@ def add(args, config, basepath, workspace):
147 extracmdopts += ' -a' 150 extracmdopts += ' -a'
148 if args.npm_dev: 151 if args.npm_dev:
149 extracmdopts += ' --npm-dev' 152 extracmdopts += ' --npm-dev'
153 if args.no_pypi:
154 extracmdopts += ' --no-pypi'
150 if args.mirrors: 155 if args.mirrors:
151 extracmdopts += ' --mirrors' 156 extracmdopts += ' --mirrors'
152 if args.srcrev: 157 if args.srcrev:
@@ -204,7 +209,7 @@ def add(args, config, basepath, workspace):
204 for fn in os.listdir(tempdir): 209 for fn in os.listdir(tempdir):
205 shutil.move(os.path.join(tempdir, fn), recipedir) 210 shutil.move(os.path.join(tempdir, fn), recipedir)
206 else: 211 else:
207 raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) 212 raise DevtoolError(f'Failed to create a recipe file for source {source}')
208 attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) 213 attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
209 if os.path.exists(attic_recipe): 214 if os.path.exists(attic_recipe):
210 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) 215 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
@@ -234,10 +239,14 @@ def add(args, config, basepath, workspace):
234 if args.fetchuri and not args.no_git: 239 if args.fetchuri and not args.no_git:
235 setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) 240 setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
236 241
237 initial_rev = None 242 initial_rev = {}
238 if os.path.exists(os.path.join(srctree, '.git')): 243 if os.path.exists(os.path.join(srctree, '.git')):
239 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) 244 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
240 initial_rev = stdout.rstrip() 245 initial_rev["."] = stdout.rstrip()
246 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
247 for line in stdout.splitlines():
248 (rev, submodule) = line.split()
249 initial_rev[os.path.relpath(submodule, srctree)] = rev
241 250
242 if args.src_subdir: 251 if args.src_subdir:
243 srctree = os.path.join(srctree, args.src_subdir) 252 srctree = os.path.join(srctree, args.src_subdir)
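
With this hunk, initial_rev changes from a single string to a dict keyed by path: "." for the top-level tree, relative paths for submodules. A hedged standalone sketch of the collection logic above, using subprocess instead of bb.process (and, like the diff, assuming submodule paths contain no spaces):

    import os
    import subprocess

    def collect_initial_revs(srctree):
        # '.' maps to the top-level HEAD; each submodule maps to its own HEAD.
        revs = {}
        out = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                      cwd=srctree, text=True)
        revs['.'] = out.strip()
        # Let git print "<rev> <absolute path>" once per (recursive) submodule.
        out = subprocess.check_output(
            ['git', 'submodule', '--quiet', 'foreach', '--recursive',
             'echo `git rev-parse HEAD` $PWD'], cwd=srctree, text=True)
        for line in out.splitlines():
            rev, path = line.split()
            revs[os.path.relpath(path, srctree)] = rev
        return revs
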
@@ -251,7 +260,8 @@ def add(args, config, basepath, workspace):
251 if b_is_s: 260 if b_is_s:
252 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) 261 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
253 if initial_rev: 262 if initial_rev:
254 f.write('\n# initial_rev: %s\n' % initial_rev) 263 for key, value in initial_rev.items():
264 f.write('\n# initial_rev %s: %s\n' % (key, value))
255 265
256 if args.binary: 266 if args.binary:
257 f.write('do_install:append() {\n') 267 f.write('do_install:append() {\n')
@@ -298,6 +308,7 @@ def add(args, config, basepath, workspace):
298 308
299def _check_compatible_recipe(pn, d): 309def _check_compatible_recipe(pn, d):
300 """Check if the recipe is supported by devtool""" 310 """Check if the recipe is supported by devtool"""
311 import bb.data
301 if pn == 'perf': 312 if pn == 'perf':
302 raise DevtoolError("The perf recipe does not actually check out " 313 raise DevtoolError("The perf recipe does not actually check out "
303 "source and thus cannot be supported by this tool", 314 "source and thus cannot be supported by this tool",
@@ -353,7 +364,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
353 bb.utils.mkdirhier(dst_d) 364 bb.utils.mkdirhier(dst_d)
354 shutil.move(src, dst) 365 shutil.move(src, dst)
355 366
356def _copy_file(src, dst, dry_run_outdir=None): 367def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
357 """Copy a file. Creates all the directory components of destination path.""" 368 """Copy a file. Creates all the directory components of destination path."""
358 dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' 369 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
359 logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) 370 logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
@@ -367,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None):
367 378
368def _git_ls_tree(repodir, treeish='HEAD', recursive=False): 379def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
369 """List contents of a git treeish""" 380 """List contents of a git treeish"""
370 import bb 381 import bb.process
371 cmd = ['git', 'ls-tree', '-z', treeish] 382 cmd = ['git', 'ls-tree', '-z', treeish]
372 if recursive: 383 if recursive:
373 cmd.append('-r') 384 cmd.append('-r')
@@ -380,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
380 ret[split[3]] = split[0:3] 391 ret[split[3]] = split[0:3]
381 return ret 392 return ret
382 393
394def _git_modified(repodir):
395 """List the difference between HEAD and the index"""
396 import bb.process
397 cmd = ['git', 'status', '--porcelain']
398 out, _ = bb.process.run(cmd, cwd=repodir)
399 ret = []
400 if out:
401 for line in out.split("\n"):
402 if line and not line.startswith('??'):
403 ret.append(line[3:])
404 return ret
405
406
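
The new _git_modified helper relies on "git status --porcelain" output, where each line is two status columns, a space, then the path; untracked entries are marked "??". A self-contained illustration of the slicing:

    # Sample porcelain output; 'M ' = modified, 'A ' = added, '??' = untracked.
    sample = "M  src/main.c\n?? build/\nA  docs/notes.txt\n"
    modified = [line[3:] for line in sample.split("\n")
                if line and not line.startswith('??')]
    assert modified == ["src/main.c", "docs/notes.txt"]
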
383def _git_exclude_path(srctree, path): 407def _git_exclude_path(srctree, path):
384 """Return pathspec (list of paths) that excludes certain path""" 408 """Return pathspec (list of paths) that excludes certain path"""
385 # NOTE: "Filtering out" files/paths in this way is not entirely reliable - 409 # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
@@ -407,8 +431,6 @@ def _ls_tree(directory):
407 431
408def extract(args, config, basepath, workspace): 432def extract(args, config, basepath, workspace):
409 """Entry point for the devtool 'extract' subcommand""" 433 """Entry point for the devtool 'extract' subcommand"""
410 import bb
411
412 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 434 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
413 if not tinfoil: 435 if not tinfoil:
414 # Error already shown 436 # Error already shown
@@ -431,8 +453,6 @@ def extract(args, config, basepath, workspace):
431 453
432def sync(args, config, basepath, workspace): 454def sync(args, config, basepath, workspace):
433 """Entry point for the devtool 'sync' subcommand""" 455 """Entry point for the devtool 'sync' subcommand"""
434 import bb
435
436 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 456 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
437 if not tinfoil: 457 if not tinfoil:
438 # Error already shown 458 # Error already shown
@@ -453,41 +473,11 @@ def sync(args, config, basepath, workspace):
453 finally: 473 finally:
454 tinfoil.shutdown() 474 tinfoil.shutdown()
455 475
456def symlink_oelocal_files_srctree(rd,srctree):
457 import oe.patch
458 if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
459 # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
460 # (otherwise the recipe won't build as expected)
461 local_files_dir = os.path.join(srctree, 'oe-local-files')
462 addfiles = []
463 for root, _, files in os.walk(local_files_dir):
464 relpth = os.path.relpath(root, local_files_dir)
465 if relpth != '.':
466 bb.utils.mkdirhier(os.path.join(srctree, relpth))
467 for fn in files:
468 if fn == '.gitignore':
469 continue
470 destpth = os.path.join(srctree, relpth, fn)
471 if os.path.exists(destpth):
472 os.unlink(destpth)
473 if relpth != '.':
474 back_relpth = os.path.relpath(local_files_dir, root)
475 os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth)
476 else:
477 os.symlink('oe-local-files/%s' % fn, destpth)
478 addfiles.append(os.path.join(relpth, fn))
479 if addfiles:
480 bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
481 useroptions = []
482 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
483 bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
484
485
486def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): 476def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
487 """Extract sources of a recipe""" 477 """Extract sources of a recipe"""
488 import oe.recipeutils
489 import oe.patch
490 import oe.path 478 import oe.path
479 import bb.data
480 import bb.process
491 481
492 pn = d.getVar('PN') 482 pn = d.getVar('PN')
493 483
@@ -520,7 +510,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
520 for event in history: 510 for event in history:
521 if not 'flag' in event: 511 if not 'flag' in event:
522 if event['op'].startswith((':append[', ':prepend[')): 512 if event['op'].startswith((':append[', ':prepend[')):
523 extra_overrides.append(event['op'].split('[')[1].split(']')[0]) 513 override = event['op'].split('[')[1].split(']')[0]
514 if not override.startswith('pn-'):
515 extra_overrides.append(override)
524 # We want to remove duplicate overrides. If a recipe had multiple 516 # We want to remove duplicate overrides. If a recipe had multiple
525 # SRC_URI_override += values it would cause multiple instances of 517 # SRC_URI_override += values it would cause multiple instances of
526 # overrides. This doesn't play nicely with things like creating a 518 # overrides. This doesn't play nicely with things like creating a
@@ -550,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
550 tempbasedir = d.getVar('WORKDIR') 542 tempbasedir = d.getVar('WORKDIR')
551 bb.utils.mkdirhier(tempbasedir) 543 bb.utils.mkdirhier(tempbasedir)
552 tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) 544 tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
545 appendbackup = None
553 try: 546 try:
554 tinfoil.logger.setLevel(logging.WARNING) 547 tinfoil.logger.setLevel(logging.WARNING)
555 548
@@ -560,11 +553,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
560 appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') 553 appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
561 shutil.copyfile(appendfile, appendbackup) 554 shutil.copyfile(appendfile, appendbackup)
562 else: 555 else:
563 appendbackup = None
564 bb.utils.mkdirhier(os.path.dirname(appendfile)) 556 bb.utils.mkdirhier(os.path.dirname(appendfile))
565 logger.debug('writing append file %s' % appendfile) 557 logger.debug('writing append file %s' % appendfile)
566 with open(appendfile, 'a') as f: 558 with open(appendfile, 'a') as f:
567 f.write('###--- _extract_source\n') 559 f.write('###--- _extract_source\n')
560 f.write('deltask do_recipe_qa\n')
561 f.write('deltask do_recipe_qa_setscene\n')
562 f.write('ERROR_QA:remove = "patch-fuzz"\n')
568 f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) 563 f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
569 f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) 564 f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
570 if not is_kernel_yocto: 565 if not is_kernel_yocto:
@@ -582,6 +577,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
582 preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') 577 preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
583 with open(preservestampfile, 'w') as f: 578 with open(preservestampfile, 'w') as f:
584 f.write(d.getVar('STAMP')) 579 f.write(d.getVar('STAMP'))
580 tinfoil.modified_files()
585 try: 581 try:
586 if is_kernel_yocto: 582 if is_kernel_yocto:
587 # We need to generate the kernel config 583 # We need to generate the kernel config
@@ -629,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
629 srcsubdir = f.read() 625 srcsubdir = f.read()
630 except FileNotFoundError as e: 626 except FileNotFoundError as e:
631 raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) 627 raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
632 srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) 628 srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR'))))
633 629
634 # Check if work-shared is empty, if yes 630 # Check if work-shared is empty, if yes
635 # find source and copy to work-shared 631 # find source and copy to work-shared
@@ -644,39 +640,26 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
644 640
645 if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): 641 if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
646 shutil.rmtree(workshareddir) 642 shutil.rmtree(workshareddir)
647 oe.path.copyhardlinktree(srcsubdir,workshareddir) 643 oe.path.copyhardlinktree(srcsubdir, workshareddir)
648 elif not os.path.exists(workshareddir): 644 elif not os.path.exists(workshareddir):
649 oe.path.copyhardlinktree(srcsubdir,workshareddir) 645 oe.path.copyhardlinktree(srcsubdir, workshareddir)
650
651 tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
652 srctree_localdir = os.path.join(srctree, 'oe-local-files')
653 646
654 if sync: 647 if sync:
655 bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) 648 try:
656 649 logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
657 # Move oe-local-files directory to srctree 650 bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
658 # As the oe-local-files is not part of the constructed git tree, 651
659 # removing them directly during the synchronization might surprise 652 # Use git fetch to update the source with the current recipe
660 # the users. Instead, we move it to oe-local-files.bak and remind 653 # To be able to update the currently checked out branch with
661 # the user in the log message. 654 # possibly new history (no fast-forward) git needs to be told
662 if os.path.exists(srctree_localdir + '.bak'): 655 # that's ok
663 shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') 656 logger.info('Syncing source files including patches to git branch: %s' % devbranch)
664 657 bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
665 if os.path.exists(srctree_localdir): 658 except bb.process.ExecutionError as e:
666 logger.info('Backing up current local file directory %s' % srctree_localdir) 659 raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
667 shutil.move(srctree_localdir, srctree_localdir + '.bak')
668
669 if os.path.exists(tempdir_localdir):
670 logger.info('Syncing local source files to srctree...')
671 shutil.copytree(tempdir_localdir, srctree_localdir)
672 else:
673 # Move oe-local-files directory to srctree
674 if os.path.exists(tempdir_localdir):
675 logger.info('Adding local source files to srctree...')
676 shutil.move(tempdir_localdir, srcsubdir)
677 660
661 else:
678 shutil.move(srcsubdir, srctree) 662 shutil.move(srcsubdir, srctree)
679 symlink_oelocal_files_srctree(d,srctree)
680 663
681 if is_kernel_yocto: 664 if is_kernel_yocto:
682 logger.info('Copying kernel config to srctree') 665 logger.info('Copying kernel config to srctree')
@@ -695,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
695 678
696def _add_md5(config, recipename, filename): 679def _add_md5(config, recipename, filename):
697 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" 680 """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
698 import bb.utils
699
700 def addfile(fn): 681 def addfile(fn):
701 md5 = bb.utils.md5_file(fn) 682 md5 = bb.utils.md5_file(fn)
702 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: 683 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
@@ -715,7 +696,6 @@ def _add_md5(config, recipename, filename):
715def _check_preserve(config, recipename): 696def _check_preserve(config, recipename):
716 """Check if a file was manually changed and needs to be saved in 'attic' 697 """Check if a file was manually changed and needs to be saved in 'attic'
717 directory""" 698 directory"""
718 import bb.utils
719 origfile = os.path.join(config.workspace_path, '.devtool_md5') 699 origfile = os.path.join(config.workspace_path, '.devtool_md5')
720 newfile = os.path.join(config.workspace_path, '.devtool_md5_new') 700 newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
721 preservepath = os.path.join(config.workspace_path, 'attic', recipename) 701 preservepath = os.path.join(config.workspace_path, 'attic', recipename)
@@ -746,26 +726,36 @@ def _check_preserve(config, recipename):
746 726
747def get_staging_kver(srcdir): 727def get_staging_kver(srcdir):
748 # Kernel version from work-shared 728 # Kernel version from work-shared
749 kerver = [] 729 import itertools
750 staging_kerVer="" 730 try:
751 if os.path.exists(srcdir) and os.listdir(srcdir): 731 with open(os.path.join(srcdir, "Makefile")) as f:
752 with open(os.path.join(srcdir,"Makefile")) as f: 732 # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 2-4 (indices 1-3)
753 version = [next(f) for x in range(5)][1:4] 733 return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4))
754 for word in version: 734 except FileNotFoundError:
755 kerver.append(word.split('= ')[1].split('\n')[0]) 735 return ""
756 staging_kerVer = ".".join(kerver)
757 return staging_kerVer
758 736
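
The rewritten get_staging_kver reads the version triple straight from the kernel Makefile. A runnable sketch of the same parsing against a synthetic Makefile:

    import io
    import itertools

    # Kernel Makefiles carry VERSION/PATCHLEVEL/SUBLEVEL on lines 2-4.
    makefile = io.StringIO("# SPDX-License-Identifier: GPL-2.0\n"
                           "VERSION = 6\n"
                           "PATCHLEVEL = 6\n"
                           "SUBLEVEL = 21\n"
                           "EXTRAVERSION =\n")
    ver = ".".join(line.rstrip().split('= ')[1]
                   for line in itertools.islice(makefile, 1, 4))
    assert ver == "6.6.21"
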
759def get_staging_kbranch(srcdir): 737def get_staging_kbranch(srcdir):
738 import bb.process
760 staging_kbranch = "" 739 staging_kbranch = ""
761 if os.path.exists(srcdir) and os.listdir(srcdir): 740 if os.path.exists(srcdir) and os.listdir(srcdir):
762 (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) 741 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
763 staging_kbranch = "".join(branch.split('\n')[0]) 742 staging_kbranch = "".join(branch.split('\n')[0])
764 return staging_kbranch 743 return staging_kbranch
765 744
745def get_real_srctree(srctree, s, unpackdir):
746 # Check that recipe isn't using a shared workdir
747 s = os.path.abspath(s)
748 unpackdir = os.path.abspath(unpackdir)
749 if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
750 # Handle if S is set to a subdirectory of the source
751 srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
752 srctree = os.path.join(srctree, srcsubdir)
753 return srctree
754
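
A worked example of the path mapping above, with hypothetical paths: when S points at a subdirectory of the unpacked source, the same subdirectory is appended to the external srctree.

    import os

    # Hypothetical values; this mirrors the function above for illustration.
    srctree = '/ws/sources/foo'
    s = os.path.abspath('/build/work/foo/unpack/foo-1.0/src')
    unpackdir = os.path.abspath('/build/work/foo/unpack')
    if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
        srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]  # 'src'
        srctree = os.path.join(srctree, srcsubdir)
    assert srctree == '/ws/sources/foo/src'
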
766def modify(args, config, basepath, workspace): 755def modify(args, config, basepath, workspace):
767 """Entry point for the devtool 'modify' subcommand""" 756 """Entry point for the devtool 'modify' subcommand"""
768 import bb 757 import bb.data
758 import bb.process
769 import oe.recipeutils 759 import oe.recipeutils
770 import oe.patch 760 import oe.patch
771 import oe.path 761 import oe.path
@@ -807,8 +797,8 @@ def modify(args, config, basepath, workspace):
807 797
808 _check_compatible_recipe(pn, rd) 798 _check_compatible_recipe(pn, rd)
809 799
810 initial_rev = None 800 initial_revs = {}
811 commits = [] 801 commits = {}
812 check_commits = False 802 check_commits = False
813 803
814 if bb.data.inherits_class('kernel-yocto', rd): 804 if bb.data.inherits_class('kernel-yocto', rd):
@@ -820,36 +810,22 @@ def modify(args, config, basepath, workspace):
820 staging_kerVer = get_staging_kver(srcdir) 810 staging_kerVer = get_staging_kver(srcdir)
821 staging_kbranch = get_staging_kbranch(srcdir) 811 staging_kbranch = get_staging_kbranch(srcdir)
822 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): 812 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
823 oe.path.copyhardlinktree(srcdir,srctree) 813 oe.path.copyhardlinktree(srcdir, srctree)
824 workdir = rd.getVar('WORKDIR') 814 unpackdir = rd.getVar('UNPACKDIR')
825 srcsubdir = rd.getVar('S') 815 srcsubdir = rd.getVar('S')
826 localfilesdir = os.path.join(srctree,'oe-local-files')
827 # Move local source files into separate subdir
828 recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
829 local_files = oe.recipeutils.get_recipe_local_files(rd)
830 816
831 for key in local_files.copy(): 817 # Add locally copied files to gitignore as we add back to the metadata directly
832 if key.endswith('scc'): 818 local_files = oe.recipeutils.get_recipe_local_files(rd)
833 sccfile = open(local_files[key], 'r')
834 for l in sccfile:
835 line = l.split()
836 if line and line[0] in ('kconf', 'patch'):
837 cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
838 if not cfg in local_files.values():
839 local_files[line[-1]] = cfg
840 shutil.copy2(cfg, workdir)
841 sccfile.close()
842
843 # Ignore local files with subdir={BP}
844 srcabspath = os.path.abspath(srcsubdir) 819 srcabspath = os.path.abspath(srcsubdir)
845 local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] 820 local_files = [fname for fname in local_files if
821 os.path.exists(os.path.join(unpackdir, fname)) and
822 srcabspath == unpackdir]
846 if local_files: 823 if local_files:
847 for fname in local_files: 824 with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
848 _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) 825 f.write('# Ignore local files, by default. Remove following lines'
849 with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: 826 'if you want to commit the directory to Git\n')
850 f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') 827 for fname in local_files:
851 828 f.write('%s\n' % fname)
852 symlink_oelocal_files_srctree(rd,srctree)
853 829
854 task = 'do_configure' 830 task = 'do_configure'
855 res = tinfoil.build_targets(pn, task, handle_events=True) 831 res = tinfoil.build_targets(pn, task, handle_events=True)
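
Instead of symlinking local files into the source tree, the kernel path now just lists them in the checkout's .gitignore. A minimal sketch of that write (the file names are hypothetical):

    import os
    import tempfile

    srctree = tempfile.mkdtemp()
    local_files = ['defconfig', 'fragment.cfg']  # hypothetical examples
    with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
        f.write('# Ignore local files, by default. Remove the following lines '
                'if you want to commit the directory to Git\n')
        for fname in local_files:
            f.write('%s\n' % fname)
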
@@ -857,22 +833,33 @@ def modify(args, config, basepath, workspace):
857 # Copy .config to workspace 833 # Copy .config to workspace
858 kconfpath = rd.getVar('B') 834 kconfpath = rd.getVar('B')
859 logger.info('Copying kernel config to workspace') 835 logger.info('Copying kernel config to workspace')
860 shutil.copy2(os.path.join(kconfpath, '.config'),srctree) 836 shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
861 837
862 # Set this to true, we still need to get initial_rev 838 # Set this to true, we still need to get initial_rev
863 # by parsing the git repo 839 # by parsing the git repo
864 args.no_extract = True 840 args.no_extract = True
865 841
866 if not args.no_extract: 842 if not args.no_extract:
867 initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) 843 initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
868 if not initial_rev: 844 if not initial_revs["."]:
869 return 1 845 return 1
870 logger.info('Source tree extracted to %s' % srctree) 846 logger.info('Source tree extracted to %s' % srctree)
847
871 if os.path.exists(os.path.join(srctree, '.git')): 848 if os.path.exists(os.path.join(srctree, '.git')):
872 # Get list of commits since this revision 849 # Get list of commits since this revision
873 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) 850 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
874 commits = stdout.split() 851 commits["."] = stdout.split()
875 check_commits = True 852 check_commits = True
853 try:
854 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
855 except bb.process.ExecutionError:
856 stdout = ""
857 for line in stdout.splitlines():
858 (rev, submodule_path) = line.split()
859 submodule = os.path.relpath(submodule_path, srctree)
860 initial_revs[submodule] = rev
861 (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
862 commits[submodule] = stdout.split()
876 else: 863 else:
877 if os.path.exists(os.path.join(srctree, '.git')): 864 if os.path.exists(os.path.join(srctree, '.git')):
878 # Check if it's a tree previously extracted by us. This is done 865 # Check if it's a tree previously extracted by us. This is done
@@ -889,11 +876,11 @@ def modify(args, config, basepath, workspace):
889 for line in stdout.splitlines(): 876 for line in stdout.splitlines():
890 if line.startswith('*'): 877 if line.startswith('*'):
891 (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) 878 (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
892 initial_rev = stdout.rstrip() 879 initial_revs["."] = stdout.rstrip()
893 if not initial_rev: 880 if "." not in initial_revs:
894 # Otherwise, just grab the head revision 881 # Otherwise, just grab the head revision
895 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) 882 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
896 initial_rev = stdout.rstrip() 883 initial_revs["."] = stdout.rstrip()
897 884
898 branch_patches = {} 885 branch_patches = {}
899 if check_commits: 886 if check_commits:
@@ -910,28 +897,40 @@ def modify(args, config, basepath, workspace):
910 seen_patches = [] 897 seen_patches = []
911 for branch in branches: 898 for branch in branches:
912 branch_patches[branch] = [] 899 branch_patches[branch] = []
913 (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) 900 (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
914 for line in stdout.splitlines(): 901 for sha1 in stdout.splitlines():
915 line = line.strip() 902 notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
916 if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): 903 origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
917 origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() 904 if origpatch and origpatch not in seen_patches:
918 if not origpatch in seen_patches: 905 seen_patches.append(origpatch)
919 seen_patches.append(origpatch) 906 branch_patches[branch].append(origpatch)
920 branch_patches[branch].append(origpatch)
921 907
922 # Need to grab this here in case the source is within a subdirectory 908 # Need to grab this here in case the source is within a subdirectory
923 srctreebase = srctree 909 srctreebase = srctree
924 910 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
925 # Check that recipe isn't using a shared workdir
926 s = os.path.abspath(rd.getVar('S'))
927 workdir = os.path.abspath(rd.getVar('WORKDIR'))
928 if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
929 # Handle if S is set to a subdirectory of the source
930 srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
931 srctree = os.path.join(srctree, srcsubdir)
932 911
933 bb.utils.mkdirhier(os.path.dirname(appendfile)) 912 bb.utils.mkdirhier(os.path.dirname(appendfile))
934 with open(appendfile, 'w') as f: 913 with open(appendfile, 'w') as f:
914 # if not present, add type=git-dependency to the secondary sources
915 # (non-local files) so they can be extracted correctly when building a recipe after
916 # doing a devtool modify on it
917 src_uri = rd.getVar('SRC_URI').split()
918 src_uri_append = []
919 src_uri_remove = []
920
921 # Assume first entry is main source extracted in ${S} so skip it
922 src_uri = src_uri[1::]
923
924 # Add "type=git-dependency" to all non local sources
925 for url in src_uri:
926 if not url.startswith('file://') and not 'type=' in url:
927 src_uri_remove.append(url)
928 src_uri_append.append('%s;type=git-dependency' % url)
929
930 if src_uri_remove:
931 f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
932 f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
933
935 f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') 934 f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
936 # Local files can be modified/tracked in separate subdir under srctree 935 # Local files can be modified/tracked in separate subdir under srctree
937 # Mostly useful for packages with S != WORKDIR 936 # Mostly useful for packages with S != WORKDIR
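
The SRC_URI rewrite above can be checked in isolation; every non-local entry after the first (assumed to be the main source) gains a type=git-dependency parameter:

    # Standalone check with made-up URLs.
    src_uri = ['git://example.com/main.git;branch=main',
               'git://example.com/dep.git;branch=main',
               'file://0001-fix.patch']
    src_uri_remove, src_uri_append = [], []
    for url in src_uri[1:]:  # skip the main source extracted in ${S}
        if not url.startswith('file://') and 'type=' not in url:
            src_uri_remove.append(url)
            src_uri_append.append('%s;type=git-dependency' % url)
    assert src_uri_remove == ['git://example.com/dep.git;branch=main']
    assert src_uri_append == ['git://example.com/dep.git;branch=main;type=git-dependency']
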
@@ -948,34 +947,31 @@ def modify(args, config, basepath, workspace):
948 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) 947 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
949 948
950 if bb.data.inherits_class('kernel', rd): 949 if bb.data.inherits_class('kernel', rd):
951 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
952 'do_fetch do_unpack do_kernel_configcheck"\n')
953 f.write('\ndo_patch[noexec] = "1"\n')
954 f.write('\ndo_configure:append() {\n'
955 ' cp ${B}/.config ${S}/.config.baseline\n'
956 ' ln -sfT ${B}/.config ${S}/.config.new\n'
957 '}\n')
958 f.write('\ndo_kernel_configme:prepend() {\n' 950 f.write('\ndo_kernel_configme:prepend() {\n'
959 ' if [ -e ${S}/.config ]; then\n' 951 ' if [ -e ${S}/.config ]; then\n'
960 ' mv ${S}/.config ${S}/.config.old\n' 952 ' mv ${S}/.config ${S}/.config.old\n'
961 ' fi\n' 953 ' fi\n'
962 '}\n') 954 '}\n')
963 if rd.getVarFlag('do_menuconfig','task'): 955 if rd.getVarFlag('do_menuconfig', 'task'):
964 f.write('\ndo_configure:append() {\n' 956 f.write('\ndo_configure:append() {\n'
965 ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' 957 ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
966 ' cp ${B}/.config ${S}/.config.baseline\n' 958 ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
967 ' ln -sfT ${B}/.config ${S}/.config.new\n' 959 ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
968 ' fi\n' 960 ' fi\n'
969 '}\n') 961 '}\n')
970 if initial_rev: 962 if initial_revs:
971 f.write('\n# initial_rev: %s\n' % initial_rev) 963 for name, rev in initial_revs.items():
972 for commit in commits: 964 f.write('\n# initial_rev %s: %s\n' % (name, rev))
973 f.write('# commit: %s\n' % commit) 965 if name in commits:
966 for commit in commits[name]:
967 f.write('# commit %s: %s\n' % (name, commit))
974 if branch_patches: 968 if branch_patches:
975 for branch in branch_patches: 969 for branch in branch_patches:
976 if branch == args.branch: 970 if branch == args.branch:
977 continue 971 continue
978 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) 972 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
973 if args.debug_build:
974 f.write('\nDEBUG_BUILD = "1"\n')
979 975
980 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) 976 update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
981 977
@@ -1020,6 +1016,7 @@ def rename(args, config, basepath, workspace):
1020 origfnver = '' 1016 origfnver = ''
1021 1017
1022 recipefilemd5 = None 1018 recipefilemd5 = None
1019 newrecipefilemd5 = None
1023 tinfoil = setup_tinfoil(basepath=basepath, tracking=True) 1020 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
1024 try: 1021 try:
1025 rd = parse_recipe(config, tinfoil, args.recipename, True) 1022 rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1097,6 +1094,7 @@ def rename(args, config, basepath, workspace):
1097 1094
1098 # Rename source tree if it's the default path 1095 # Rename source tree if it's the default path
1099 appendmd5 = None 1096 appendmd5 = None
1097 newappendmd5 = None
1100 if not args.no_srctree: 1098 if not args.no_srctree:
1101 srctree = workspace[args.recipename]['srctree'] 1099 srctree = workspace[args.recipename]['srctree']
1102 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): 1100 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
@@ -1185,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
1185 """Get initial and update rev of a recipe. These are the start point of the 1183 """Get initial and update rev of a recipe. These are the start point of the
1186 whole patchset and start point for the patches to be re-generated/updated. 1184 whole patchset and start point for the patches to be re-generated/updated.
1187 """ 1185 """
1188 import bb 1186 import bb.process
1189 1187
1190 # Get current branch 1188 # Get current branch
1191 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', 1189 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
@@ -1193,44 +1191,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
1193 branchname = stdout.rstrip() 1191 branchname = stdout.rstrip()
1194 1192
1195 # Parse initial rev from recipe if not specified 1193 # Parse initial rev from recipe if not specified
1196 commits = [] 1194 commits = {}
1197 patches = [] 1195 patches = []
1196 initial_revs = {}
1198 with open(recipe_path, 'r') as f: 1197 with open(recipe_path, 'r') as f:
1199 for line in f: 1198 for line in f:
1200 if line.startswith('# initial_rev:'): 1199 pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
1201 if not initial_rev: 1200 match = re.search(pattern, line)
1202 initial_rev = line.split(':')[-1].strip() 1201 if match:
1203 elif line.startswith('# commit:') and not force_patch_refresh: 1202 name = match.group(1)
1204 commits.append(line.split(':')[-1].strip()) 1203 rev = match.group(2)
1205 elif line.startswith('# patches_%s:' % branchname): 1204 if line.startswith('# initial_rev'):
1206 patches = line.split(':')[-1].strip().split(',') 1205 if not (name == "." and initial_rev):
1207 1206 initial_revs[name] = rev
1208 update_rev = initial_rev 1207 elif line.startswith('# commit') and not force_patch_refresh:
1209 changed_revs = None 1208 if name not in commits:
1210 if initial_rev: 1209 commits[name] = [rev]
1210 else:
1211 commits[name].append(rev)
1212 elif line.startswith('# patches_%s:' % branchname):
1213 patches = line.split(':')[-1].strip().split(',')
1214
1215 update_revs = dict(initial_revs)
1216 changed_revs = {}
1217 for name, rev in initial_revs.items():
1211 # Find first actually changed revision 1218 # Find first actually changed revision
1212 stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % 1219 stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
1213 initial_rev, cwd=srctree) 1220 rev, cwd=os.path.join(srctree, name))
1214 newcommits = stdout.split() 1221 newcommits = stdout.split()
1215 for i in range(min(len(commits), len(newcommits))): 1222 if name in commits:
1216 if newcommits[i] == commits[i]: 1223 for i in range(min(len(commits[name]), len(newcommits))):
1217 update_rev = commits[i] 1224 if newcommits[i] == commits[name][i]:
1225 update_revs[name] = commits[name][i]
1218 1226
1219 try: 1227 try:
1220 stdout, _ = bb.process.run('git cherry devtool-patched', 1228 stdout, _ = bb.process.run('git cherry devtool-patched',
1221 cwd=srctree) 1229 cwd=os.path.join(srctree, name))
1222 except bb.process.ExecutionError as err: 1230 except bb.process.ExecutionError as err:
1223 stdout = None 1231 stdout = None
1224 1232
1225 if stdout is not None and not force_patch_refresh: 1233 if stdout is not None and not force_patch_refresh:
1226 changed_revs = []
1227 for line in stdout.splitlines(): 1234 for line in stdout.splitlines():
1228 if line.startswith('+ '): 1235 if line.startswith('+ '):
1229 rev = line.split()[1] 1236 rev = line.split()[1]
1230 if rev in newcommits: 1237 if rev in newcommits:
1231 changed_revs.append(rev) 1238 if name not in changed_revs:
1239 changed_revs[name] = [rev]
1240 else:
1241 changed_revs[name].append(rev)
1232 1242
1233 return initial_rev, update_rev, changed_revs, patches 1243 return initial_revs, update_revs, changed_revs, patches
1234 1244
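
The single-pattern parse above covers both comment forms devtool writes into the workspace bbappend ("# initial_rev <name>: <rev>" and "# commit <name>: <rev>"). A hedged illustration with made-up revisions:

    import re

    pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
    lines = ['# initial_rev .: 1a2b3c4d',
             '# initial_rev lib/sub: 5e6f7a8b',
             '# commit .: 9c0d1e2f']
    parsed = [(m.group(1), m.group(2))
              for m in (re.search(pattern, l) for l in lines) if m]
    assert parsed == [('.', '1a2b3c4d'), ('lib/sub', '5e6f7a8b'), ('.', '9c0d1e2f')]
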
1235def _remove_file_entries(srcuri, filelist): 1245def _remove_file_entries(srcuri, filelist):
1236 """Remove file:// entries from SRC_URI""" 1246 """Remove file:// entries from SRC_URI"""
@@ -1285,17 +1295,21 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru
1285 raise 1295 raise
1286 1296
1287 1297
1288def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): 1298def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1289 """Export patches from srctree to given location. 1299 """Export patches from srctree to given location.
1290 Returns three-tuple of dicts: 1300 Returns three-tuple of dicts:
1291 1. updated - patches that already exist in SRCURI 1301 1. updated - patches that already exist in SRCURI
1292 2. added - new patches that don't exist in SRCURI 1302 2. added - new patches that don't exist in SRCURI
1293 3. removed - patches that exist in SRCURI but not in exported patches 1303 3. removed - patches that exist in SRCURI but not in exported patches
1294 In each dict the key is the 'basepath' of the URI and value is the 1304 In each dict the key is the 'basepath' of the URI and value is:
1295 absolute path to the existing file in recipe space (if any). 1305 - for updated and added dicts, a dict with 2 optional keys:
1306 - 'path': the absolute path to the existing file in recipe space (if any)
1307 - 'patchdir': the directory in which the patch should be applied (if any)
1308 - for removed dict, the absolute path to the existing file in recipe space
1296 """ 1309 """
1297 import oe.recipeutils 1310 import oe.recipeutils
1298 from oe.patch import GitApplyTree 1311 from oe.patch import GitApplyTree
1312 import bb.process
1299 updated = OrderedDict() 1313 updated = OrderedDict()
1300 added = OrderedDict() 1314 added = OrderedDict()
1301 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') 1315 seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
@@ -1306,59 +1320,67 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
1306 1320
1307 # Generate patches from Git, exclude local files directory 1321 # Generate patches from Git, exclude local files directory
1308 patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') 1322 patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
1309 GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) 1323 GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
1310 1324 for dirpath, dirnames, filenames in os.walk(destdir):
1311 new_patches = sorted(os.listdir(destdir)) 1325 new_patches = filenames
1312 for new_patch in new_patches: 1326 reldirpath = os.path.relpath(dirpath, destdir)
1313 # Strip numbering from patch names. If it's a git sequence named patch, 1327 for new_patch in new_patches:
1314 # the numbers might not match up since we are starting from a different 1328 # Strip numbering from patch names. If it's a git sequence named patch,
1315 # revision This does assume that people are using unique shortlog 1329 # the numbers might not match up since we are starting from a different
1316 # values, but they ought to be anyway... 1330 # revision This does assume that people are using unique shortlog
1317 new_basename = seqpatch_re.match(new_patch).group(2) 1331 # values, but they ought to be anyway...
1318 match_name = None 1332 new_basename = seqpatch_re.match(new_patch).group(2)
1319 for old_patch in existing_patches: 1333 match_name = None
1320 old_basename = seqpatch_re.match(old_patch).group(2) 1334 old_patch = None
1321 old_basename_splitext = os.path.splitext(old_basename) 1335 for old_patch in existing_patches:
1322 if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: 1336 old_basename = seqpatch_re.match(old_patch).group(2)
1323 old_patch_noext = os.path.splitext(old_patch)[0] 1337 old_basename_splitext = os.path.splitext(old_basename)
1324 match_name = old_patch_noext 1338 if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
1325 break 1339 old_patch_noext = os.path.splitext(old_patch)[0]
1326 elif new_basename == old_basename: 1340 match_name = old_patch_noext
1327 match_name = old_patch 1341 break
1328 break 1342 elif new_basename == old_basename:
1329 if match_name: 1343 match_name = old_patch
1330 # Rename patch files 1344 break
1331 if new_patch != match_name: 1345 if match_name:
1332 bb.utils.rename(os.path.join(destdir, new_patch), 1346 # Rename patch files
1333 os.path.join(destdir, match_name)) 1347 if new_patch != match_name:
1334 # Need to pop it off the list now before checking changed_revs 1348 bb.utils.rename(os.path.join(destdir, new_patch),
1335 oldpath = existing_patches.pop(old_patch) 1349 os.path.join(destdir, match_name))
1336 if changed_revs is not None: 1350 # Need to pop it off the list now before checking changed_revs
1337 # Avoid updating patches that have not actually changed 1351 oldpath = existing_patches.pop(old_patch)
1338 with open(os.path.join(destdir, match_name), 'r') as f: 1352 if changed_revs is not None and dirpath in changed_revs:
1339 firstlineitems = f.readline().split() 1353 # Avoid updating patches that have not actually changed
1340 # Looking for "From <hash>" line 1354 with open(os.path.join(dirpath, match_name), 'r') as f:
1341 if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: 1355 firstlineitems = f.readline().split()
1342 if not firstlineitems[1] in changed_revs: 1356 # Looking for "From <hash>" line
1343 continue 1357 if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
1344 # Recompress if necessary 1358 if not firstlineitems[1] in changed_revs[dirpath]:
1345 if oldpath.endswith(('.gz', '.Z')): 1359 continue
1346 bb.process.run(['gzip', match_name], cwd=destdir) 1360 # Recompress if necessary
1347 if oldpath.endswith('.gz'): 1361 if oldpath.endswith(('.gz', '.Z')):
1348 match_name += '.gz' 1362 bb.process.run(['gzip', match_name], cwd=destdir)
1349 else: 1363 if oldpath.endswith('.gz'):
1350 match_name += '.Z' 1364 match_name += '.gz'
1351 elif oldpath.endswith('.bz2'): 1365 else:
1352 bb.process.run(['bzip2', match_name], cwd=destdir) 1366 match_name += '.Z'
1353 match_name += '.bz2' 1367 elif oldpath.endswith('.bz2'):
1354 updated[match_name] = oldpath 1368 bb.process.run(['bzip2', match_name], cwd=destdir)
1355 else: 1369 match_name += '.bz2'
1356 added[new_patch] = None 1370 updated[match_name] = {'path' : oldpath}
1371 if reldirpath != ".":
1372 updated[match_name]['patchdir'] = reldirpath
1373 else:
1374 added[new_patch] = {}
1375 if reldirpath != ".":
1376 added[new_patch]['patchdir'] = reldirpath
1377
1357 return (updated, added, existing_patches) 1378 return (updated, added, existing_patches)
1358 1379
1359 1380
1360def _create_kconfig_diff(srctree, rd, outfile): 1381def _create_kconfig_diff(srctree, rd, outfile):
1361 """Create a kconfig fragment""" 1382 """Create a kconfig fragment"""
1383 import bb.process
1362 # Only update config fragment if both config files exist 1384 # Only update config fragment if both config files exist
1363 orig_config = os.path.join(srctree, '.config.baseline') 1385 orig_config = os.path.join(srctree, '.config.baseline')
1364 new_config = os.path.join(srctree, '.config.new') 1386 new_config = os.path.join(srctree, '.config.new')
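
To make the new return shape of _export_patches (described in its docstring above) concrete, with hypothetical paths: 'updated' and 'added' now carry small parameter dicts, so callers read entries with .get() rather than treating the value as a bare path.

    updated = {'0001-fix.patch': {'path': '/layer/recipes-foo/foo/0001-fix.patch'}}
    added = {'0002-sub-fix.patch': {'patchdir': 'lib/sub'}}
    for name, param in {**updated, **added}.items():
        path = param.get('path')          # existing file in recipe space, if any
        patchdir = param.get('patchdir')  # subdirectory the patch applies in, if any
        print(name, path, patchdir)
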
@@ -1390,38 +1412,59 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1390 1. updated - files that already exist in SRCURI 1412 1. updated - files that already exist in SRCURI
1391 2. added - new files that don't exist in SRCURI 1413 2. added - new files that don't exist in SRCURI
1392 3. removed - files that exist in SRCURI but not in exported files 1414 3. removed - files that exist in SRCURI but not in exported files
1393 In each dict the key is the 'basepath' of the URI and value is the 1415 In each dict the key is the 'basepath' of the URI and value is:
1394 absolute path to the existing file in recipe space (if any). 1416 - for updated and added dicts, a dict with 1 optional key:
1417 - 'path': the absolute path to the existing file in recipe space (if any)
1418 - for removed dict, the absolute path to the existing file in recipe space
1395 """ 1419 """
1396 import oe.recipeutils 1420 import oe.recipeutils
1421 import bb.data
1422 import bb.process
1397 1423
1398 # Find out local files (SRC_URI files that exist in the "recipe space"). 1424 # Find out local files (SRC_URI files that exist in the "recipe space").
1399 # Local files that reside in srctree are not included in patch generation. 1425 # Local files that reside in srctree are not included in patch generation.
1400 # Instead they are directly copied over the original source files (in 1426 # Instead they are directly copied over the original source files (in
1401 # recipe space). 1427 # recipe space).
1402 existing_files = oe.recipeutils.get_recipe_local_files(rd) 1428 existing_files = oe.recipeutils.get_recipe_local_files(rd)
1429
1403 new_set = None 1430 new_set = None
1404 updated = OrderedDict() 1431 updated = OrderedDict()
1405 added = OrderedDict() 1432 added = OrderedDict()
1406 removed = OrderedDict() 1433 removed = OrderedDict()
1407 local_files_dir = os.path.join(srctreebase, 'oe-local-files') 1434
1408 git_files = _git_ls_tree(srctree) 1435 # Get current branch and return early with empty lists
1409 if 'oe-local-files' in git_files: 1436 # if on one of the override branches
1410 # If tracked by Git, take the files from srctree HEAD. First get 1437 # (local files are provided only for the main branch and processing
1411 # the tree object of the directory 1438 # them against lists from recipe overrides will result in mismatches
1412 tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') 1439 # and broken modifications to recipes).
1413 tree = git_files['oe-local-files'][2] 1440 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
1414 bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, 1441 cwd=srctree)
1415 env=dict(os.environ, GIT_WORK_TREE=destdir, 1442 branchname = stdout.rstrip()
1416 GIT_INDEX_FILE=tmp_index)) 1443 if branchname.startswith(override_branch_prefix):
1417 new_set = list(_git_ls_tree(srctree, tree, True).keys()) 1444 return (updated, added, removed)
1418 elif os.path.isdir(local_files_dir): 1445
1419 # If not tracked by Git, just copy from working copy 1446 files = _git_modified(srctree)
1420 new_set = _ls_tree(local_files_dir) 1447 #if not files:
1421 bb.process.run(['cp', '-ax', 1448 # files = _ls_tree(srctree)
1422 os.path.join(local_files_dir, '.'), destdir]) 1449 for f in files:
1423 else: 1450 fullfile = os.path.join(srctree, f)
1424 new_set = [] 1451 if os.path.exists(os.path.join(fullfile, ".git")):
1452 # submodules handled elsewhere
1453 continue
1454 if f not in existing_files:
1455 added[f] = {}
1456 if os.path.isdir(os.path.join(srctree, f)):
1457 shutil.copytree(fullfile, os.path.join(destdir, f))
1458 else:
1459 shutil.copy2(fullfile, os.path.join(destdir, f))
1460 elif not os.path.exists(fullfile):
1461 removed[f] = existing_files[f]
1462 elif f in existing_files:
1463 updated[f] = {'path' : existing_files[f]}
1464 if os.path.isdir(os.path.join(srctree, f)):
1465 shutil.copytree(fullfile, os.path.join(destdir, f))
1466 else:
1467 shutil.copy2(fullfile, os.path.join(destdir, f))
1425 1468
1426 # Special handling for kernel config 1469 # Special handling for kernel config
1427 if bb.data.inherits_class('kernel-yocto', rd): 1470 if bb.data.inherits_class('kernel-yocto', rd):
@@ -1429,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1429 fragment_path = os.path.join(destdir, fragment_fn) 1472 fragment_path = os.path.join(destdir, fragment_fn)
1430 if _create_kconfig_diff(srctree, rd, fragment_path): 1473 if _create_kconfig_diff(srctree, rd, fragment_path):
1431 if os.path.exists(fragment_path): 1474 if os.path.exists(fragment_path):
1432 if fragment_fn not in new_set: 1475 if fragment_fn in removed:
1433 new_set.append(fragment_fn) 1476 del removed[fragment_fn]
1434 # Copy fragment to local-files 1477 if fragment_fn not in updated and fragment_fn not in added:
1435 if os.path.isdir(local_files_dir): 1478 added[fragment_fn] = {}
1436 shutil.copy2(fragment_path, local_files_dir)
1437 else: 1479 else:
1438 if fragment_fn in new_set: 1480 if fragment_fn in updated:
1439 new_set.remove(fragment_fn) 1481 removed[fragment_fn] = updated[fragment_fn]
1440 # Remove fragment from local-files 1482 del updated[fragment_fn]
1441 if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
1442 os.unlink(os.path.join(local_files_dir, fragment_fn))
1443 1483
1444 # Special handling for cml1, ccmake, etc bbclasses that generated 1484 # Special handling for cml1, ccmake, etc bbclasses that generated
1445 # configuration fragment files that are consumed as source files 1485 # configuration fragment files that are consumed as source files
@@ -1447,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1447 if bb.data.inherits_class(frag_class, rd): 1487 if bb.data.inherits_class(frag_class, rd):
1448 srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) 1488 srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
1449 if os.path.exists(srcpath): 1489 if os.path.exists(srcpath):
1450 if frag_name not in new_set: 1490 if frag_name in removed:
1451 new_set.append(frag_name) 1491 del removed[frag_name]
1492 if frag_name not in updated:
1493 added[frag_name] = {}
1452 # copy fragment into destdir 1494 # copy fragment into destdir
1453 shutil.copy2(srcpath, destdir) 1495 shutil.copy2(srcpath, destdir)
1454 # copy fragment into local files if exists 1496
1455 if os.path.isdir(local_files_dir):
1456 shutil.copy2(srcpath, local_files_dir)
1457
1458 if new_set is not None:
1459 for fname in new_set:
1460 if fname in existing_files:
1461 origpath = existing_files.pop(fname)
1462 workpath = os.path.join(local_files_dir, fname)
1463 if not filecmp.cmp(origpath, workpath):
1464 updated[fname] = origpath
1465 elif fname != '.gitignore':
1466 added[fname] = None
1467
1468 workdir = rd.getVar('WORKDIR')
1469 s = rd.getVar('S')
1470 if not s.endswith(os.sep):
1471 s += os.sep
1472
1473 if workdir != s:
1474 # Handle files where subdir= was specified
1475 for fname in list(existing_files.keys()):
1476 # FIXME handle both subdir starting with BP and not?
1477 fworkpath = os.path.join(workdir, fname)
1478 if fworkpath.startswith(s):
1479 fpath = os.path.join(srctree, os.path.relpath(fworkpath, s))
1480 if os.path.exists(fpath):
1481 origpath = existing_files.pop(fname)
1482 if not filecmp.cmp(origpath, fpath):
1483 updated[fpath] = origpath
1484
1485 removed = existing_files
1486 return (updated, added, removed) 1497 return (updated, added, removed)
1487 1498
1488 1499
@@ -1500,7 +1511,7 @@ def _determine_files_dir(rd):
1500 1511
1501def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): 1512def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None):
1502 """Implement the 'srcrev' mode of update-recipe""" 1513 """Implement the 'srcrev' mode of update-recipe"""
1503 import bb 1514 import bb.process
1504 import oe.recipeutils 1515 import oe.recipeutils
1505 1516
1506 dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' 1517 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
@@ -1509,6 +1520,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1509 recipedir = os.path.basename(recipefile) 1520 recipedir = os.path.basename(recipefile)
1510 logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) 1521 logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
1511 1522
1523 # Get original SRCREV
1524 old_srcrev = rd.getVar('SRCREV') or ''
1525 if old_srcrev == "INVALID":
1526 raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository')
1527 old_srcrev = {'.': old_srcrev}
1528
1512 # Get HEAD revision 1529 # Get HEAD revision
1513 try: 1530 try:
1514 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) 1531 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
@@ -1532,16 +1549,16 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1532 local_files_dir = tempfile.mkdtemp(dir=tempdir) 1549 local_files_dir = tempfile.mkdtemp(dir=tempdir)
1533 srctreebase = workspace[recipename]['srctreebase'] 1550 srctreebase = workspace[recipename]['srctreebase']
1534 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) 1551 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1552 removedentries = {}
1535 if not no_remove: 1553 if not no_remove:
1536 # Find list of existing patches in recipe file 1554 # Find list of existing patches in recipe file
1537 patches_dir = tempfile.mkdtemp(dir=tempdir) 1555 patches_dir = tempfile.mkdtemp(dir=tempdir)
1538 old_srcrev = rd.getVar('SRCREV') or ''
1539 upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, 1556 upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
1540 patches_dir) 1557 patches_dir)
1541 logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) 1558 logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
1542 1559
1543 # Remove deleted local files and "overlapping" patches 1560 # Remove deleted local files and "overlapping" patches
1544 remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) 1561 remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
1545 if remove_files: 1562 if remove_files:
1546 removedentries = _remove_file_entries(srcuri, remove_files)[0] 1563 removedentries = _remove_file_entries(srcuri, remove_files)[0]
1547 update_srcuri = True 1564 update_srcuri = True
@@ -1555,14 +1572,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
                 patchfields['SRC_URI'] = '\\\n    '.join(srcuri)
             if dry_run_outdir:
                 logger.info('Creating bbappend (dry-run)')
-            else:
-                appendfile, destpath = oe.recipeutils.bbappend_recipe(
-                    rd, appendlayerdir, files, wildcardver=wildcard_version,
-                    extralines=patchfields, removevalues=removevalues,
-                    redirect_output=dry_run_outdir)
+            appendfile, destpath = oe.recipeutils.bbappend_recipe(
+                rd, appendlayerdir, files, wildcardver=wildcard_version,
+                extralines=patchfields, removevalues=removevalues,
+                redirect_output=dry_run_outdir)
         else:
             files_dir = _determine_files_dir(rd)
-            for basepath, path in upd_f.items():
+            for basepath, param in upd_f.items():
+                path = param['path']
                 logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
                 if os.path.isabs(basepath):
                     # Original file (probably with subdir pointing inside source tree)
@@ -1572,7 +1589,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
                     _move_file(os.path.join(local_files_dir, basepath), path,
                                dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                 update_srcuri = True
-            for basepath, path in new_f.items():
+            for basepath, param in new_f.items():
+                path = param['path']
                 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
                 _move_file(os.path.join(local_files_dir, basepath),
                            os.path.join(files_dir, basepath),
@@ -1595,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
 
 def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
     """Implement the 'patch' mode of update-recipe"""
-    import bb
     import oe.recipeutils
 
     recipefile = rd.getVar('FILE')
@@ -1604,9 +1621,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
     if not os.path.exists(append):
         raise DevtoolError('unable to find workspace bbappend for recipe %s' %
                            recipename)
+    srctreebase = workspace[recipename]['srctreebase']
+    relpatchdir = os.path.relpath(srctreebase, srctree)
+    if relpatchdir == '.':
+        patchdir_params = {}
+    else:
+        patchdir_params = {'patchdir': relpatchdir}
+
+    def srcuri_entry(basepath, patchdir_params):
+        if patchdir_params:
+            paramstr = ';' + ';'.join('%s=%s' % (k, v) for k, v in patchdir_params.items())
+        else:
+            paramstr = ''
+        return 'file://%s%s' % (basepath, paramstr)
 
-    initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
-    if not initial_rev:
+    initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
+    if not initial_revs:
         raise DevtoolError('Unable to find initial revision - please specify '
                            'it with --initial-rev')
 
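For reference, the new srcuri_entry() helper above does nothing more than serialise optional fetcher parameters onto a file:// URI. A minimal standalone sketch of the same function, with illustrative file names and parameters:

    def srcuri_entry(basepath, patchdir_params):
        # Append any fetcher parameters as ';key=value' pairs.
        if patchdir_params:
            paramstr = ';' + ';'.join('%s=%s' % (k, v) for k, v in patchdir_params.items())
        else:
            paramstr = ''
        return 'file://%s%s' % (basepath, paramstr)

    print(srcuri_entry('fix.patch', {}))                      # file://fix.patch
    print(srcuri_entry('fix.patch', {'patchdir': 'subdir'}))  # file://fix.patch;patchdir=subdir
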
@@ -1620,61 +1650,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
     tempdir = tempfile.mkdtemp(prefix='devtool')
     try:
         local_files_dir = tempfile.mkdtemp(dir=tempdir)
-        if filter_patches:
-            upd_f = {}
-            new_f = {}
-            del_f = {}
-        else:
-            srctreebase = workspace[recipename]['srctreebase']
-            upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
-
-        remove_files = []
-        if not no_remove:
-            # Get all patches from source tree and check if any should be removed
-            all_patches_dir = tempfile.mkdtemp(dir=tempdir)
-            _, _, del_p = _export_patches(srctree, rd, initial_rev,
-                                          all_patches_dir)
-            # Remove deleted local files and patches
-            remove_files = list(del_f.values()) + list(del_p.values())
+        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
 
         # Get updated patches from source tree
         patches_dir = tempfile.mkdtemp(dir=tempdir)
-        upd_p, new_p, _ = _export_patches(srctree, rd, update_rev,
+        upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
                                           patches_dir, changed_revs)
+        # Get all patches from source tree and check if any should be removed
+        all_patches_dir = tempfile.mkdtemp(dir=tempdir)
+        _, _, del_p = _export_patches(srctree, rd, initial_revs,
+                                      all_patches_dir)
         logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
         if filter_patches:
             new_p = OrderedDict()
             upd_p = OrderedDict((k, v) for k, v in upd_p.items() if k in filter_patches)
-            remove_files = [f for f in remove_files if f in filter_patches]
+            del_p = OrderedDict((k, v) for k, v in del_p.items() if k in filter_patches)
+        remove_files = []
+        if not no_remove:
+            # Remove deleted local files and patches
+            remove_files = list(del_f.values()) + list(del_p.values())
         updatefiles = False
         updaterecipe = False
         destpath = None
         srcuri = (rd.getVar('SRC_URI', False) or '').split()
+
         if appendlayerdir:
             files = OrderedDict((os.path.join(local_files_dir, key), val) for
                                 key, val in list(upd_f.items()) + list(new_f.items()))
             files.update(OrderedDict((os.path.join(patches_dir, key), val) for
                                      key, val in list(upd_p.items()) + list(new_p.items())))
+
+            params = []
+            for file, param in files.items():
+                patchdir_param = dict(patchdir_params)
+                patchdir = param.get('patchdir', ".")
+                if patchdir != ".":
+                    if patchdir_param:
+                        patchdir_param['patchdir'] += patchdir
+                    else:
+                        patchdir_param['patchdir'] = patchdir
+                params.append(patchdir_param)
+
             if files or remove_files:
                 removevalues = None
                 if remove_files:
                     removedentries, remaining = _remove_file_entries(
                         srcuri, remove_files)
                     if removedentries or remaining:
-                        remaining = ['file://' + os.path.basename(item) for
+                        remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
                                      item in remaining]
                         removevalues = {'SRC_URI': removedentries + remaining}
                 appendfile, destpath = oe.recipeutils.bbappend_recipe(
                     rd, appendlayerdir, files,
                     wildcardver=wildcard_version,
                     removevalues=removevalues,
-                    redirect_output=dry_run_outdir)
+                    redirect_output=dry_run_outdir,
+                    params=params)
             else:
                 logger.info('No patches or local source files needed updating')
         else:
             # Update existing files
             files_dir = _determine_files_dir(rd)
-            for basepath, path in upd_f.items():
+            for basepath, param in upd_f.items():
+                path = param['path']
                 logger.info('Updating file %s' % basepath)
                 if os.path.isabs(basepath):
                     # Original file (probably with subdir pointing inside source tree)
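The per-file parameter merging above is easier to follow in isolation. A sketch of the same logic with illustrative dictionary contents (note the recipe-wide patchdir and the per-patch patchdir are concatenated, so the values are expected to carry their own separators):

    def merge_patchdir(patchdir_params, param):
        # Start from the recipe-wide patchdir (if any) and append the
        # per-patch subdirectory when it is not the source tree root.
        patchdir_param = dict(patchdir_params)
        patchdir = param.get('patchdir', '.')
        if patchdir != '.':
            if patchdir_param:
                patchdir_param['patchdir'] += patchdir
            else:
                patchdir_param['patchdir'] = patchdir
        return patchdir_param

    print(merge_patchdir({}, {'patchdir': 'sub/'}))                     # {'patchdir': 'sub/'}
    print(merge_patchdir({'patchdir': 'base/'}, {'patchdir': 'sub/'}))  # {'patchdir': 'base/sub/'}
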
@@ -1685,14 +1723,23 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
                     _move_file(os.path.join(local_files_dir, basepath), path,
                                dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                 updatefiles = True
-            for basepath, path in upd_p.items():
-                patchfn = os.path.join(patches_dir, basepath)
+            for basepath, param in upd_p.items():
+                path = param['path']
+                patchdir = param.get('patchdir', ".")
+                patchdir_param = {}
+                if patchdir != ".":
+                    patchdir_param = dict(patchdir_params)
+                    if patchdir_param:
+                        patchdir_param['patchdir'] += patchdir
+                    else:
+                        patchdir_param['patchdir'] = patchdir
+                patchfn = os.path.join(patches_dir, patchdir, basepath)
                 if os.path.dirname(path) + '/' == dl_dir:
                     # This is a downloaded patch file - we now need to
                     # replace the entry in SRC_URI with our local version
                     logger.info('Replacing remote patch %s with updated local version' % basepath)
                     path = os.path.join(files_dir, basepath)
-                    _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath)
+                    _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
                     updaterecipe = True
                 else:
                     logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
@@ -1700,21 +1747,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
                                dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                     updatefiles = True
             # Add any new files
-            for basepath, path in new_f.items():
+            for basepath, param in new_f.items():
                 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
                 _move_file(os.path.join(local_files_dir, basepath),
                            os.path.join(files_dir, basepath),
                            dry_run_outdir=dry_run_outdir,
                            base_outdir=recipedir)
-                srcuri.append('file://%s' % basepath)
+                srcuri.append(srcuri_entry(basepath, patchdir_params))
                 updaterecipe = True
-            for basepath, path in new_p.items():
+            for basepath, param in new_p.items():
+                patchdir = param.get('patchdir', ".")
                 logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
-                _move_file(os.path.join(patches_dir, basepath),
+                _move_file(os.path.join(patches_dir, patchdir, basepath),
                            os.path.join(files_dir, basepath),
                            dry_run_outdir=dry_run_outdir,
                            base_outdir=recipedir)
-                srcuri.append('file://%s' % basepath)
+                params = dict(patchdir_params)
+                if patchdir != ".":
+                    if params:
+                        params['patchdir'] += patchdir
+                    else:
+                        params['patchdir'] = patchdir
+
+                srcuri.append(srcuri_entry(basepath, params))
                 updaterecipe = True
         # Update recipe, if needed
         if _remove_file_entries(srcuri, remove_files)[0]:
@@ -1737,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
 
 def _guess_recipe_update_mode(srctree, rdata):
     """Guess the recipe update mode to use"""
+    import bb.process
     src_uri = (rdata.getVar('SRC_URI') or '').split()
     git_uris = [uri for uri in src_uri if uri.startswith('git://')]
     if not git_uris:
@@ -1758,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata):
     return 'patch'
 
 def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
+    import bb.data
+    import bb.process
     srctree = workspace[recipename]['srctree']
     if mode == 'auto':
         mode = _guess_recipe_update_mode(srctree, rd)
@@ -1771,6 +1829,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
         for line in stdout.splitlines():
             branchname = line[2:]
             if line.startswith('* '):
+                if 'HEAD' in line:
+                    raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
                 startbranch = branchname
             if branchname.startswith(override_branch_prefix):
                 override_branches.append(branchname)
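The new detached-HEAD guard relies on `git branch --list` marking the current entry with `* ` and printing `(HEAD detached at ...)` when no branch is checked out. A standalone sketch of the same check (the sample outputs are illustrative):

    def find_start_branch(branch_list_output):
        # Mirrors the guard above: the current entry starts with '* ' and
        # contains 'HEAD' when the repository is in detached-HEAD state.
        for line in branch_list_output.splitlines():
            if line.startswith('* '):
                if 'HEAD' in line:
                    raise RuntimeError('Detached HEAD - please check out a branch')
                return line[2:]

    print(find_start_branch('* devtool\n  master'))   # devtool
    # find_start_branch('* (HEAD detached at 1a2b3c4)\n  master') would raise
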
@@ -1878,6 +1938,7 @@ def status(args, config, basepath, workspace):
 
 def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
     """Reset one or more recipes"""
+    import bb.process
     import oe.path
 
     def clean_preferred_provider(pn, layerconf_path):
@@ -1890,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
             lines = f.readlines()
             with open(new_layerconf_file, 'a') as nf:
                 for line in lines:
-                    pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'
+                    pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
                     if not re.match(pprovider_exp, line):
                         nf.write(line)
                     else:
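The switch to re.escape() matters because recipe names may contain regex metacharacters. With an illustrative PN such as "gtk+3", the unescaped pattern silently changes meaning:

    import re

    pn = 'gtk+3'  # illustrative recipe name containing a regex metacharacter
    line = 'PREFERRED_PROVIDER_virtual/gtk = "gtk+3"'

    unsafe = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'            # '+' acts as a quantifier
    safe = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'   # '+' matched literally

    print(bool(re.match(unsafe, line)))  # False - pattern matches 'gtk3', 'gtkk3', ...
    print(bool(re.match(safe, line)))    # True
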
@@ -1960,9 +2021,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
                     shutil.rmtree(srctreebase)
                 else:
                     # We don't want to risk wiping out any work in progress
-                    logger.info('Leaving source tree %s as-is; if you no '
-                                'longer need it then please delete it manually'
-                                % srctreebase)
+                    if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
+                        from datetime import datetime
+                        preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
+                        logger.info('Preserving source tree in %s\nIf you no '
+                                    'longer need it then please delete it manually.\n'
+                                    'It is also possible to reuse it via the devtool source tree argument.'
+                                    % preservesrc)
+                        bb.utils.mkdirhier(os.path.dirname(preservesrc))
+                        shutil.move(srctreebase, preservesrc)
+                    else:
+                        logger.info('Leaving source tree %s as-is; if you no '
+                                    'longer need it then please delete it manually'
+                                    % srctreebase)
             else:
                 # This is unlikely, but if it's empty we can just remove it
                 os.rmdir(srctreebase)
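The preservation path above is derived from the workspace plus a timestamp, so repeated resets of the same recipe never collide. A sketch of the naming scheme with illustrative workspace and recipe names:

    import os
    from datetime import datetime

    workspace_path = '/home/user/workspace'  # illustrative
    pn = 'example-recipe'                    # illustrative

    preservesrc = os.path.join(workspace_path, 'attic', 'sources',
                               '{}.{}'.format(pn, datetime.now().strftime('%Y%m%d%H%M%S')))
    print(preservesrc)  # e.g. /home/user/workspace/attic/sources/example-recipe.20240101120000
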
@@ -1971,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
 
 def reset(args, config, basepath, workspace):
     """Entry point for the devtool 'reset' subcommand"""
-    import bb
-    import shutil
 
     recipes = ""
 
@@ -2222,6 +2291,7 @@ def register_commands(subparsers, context):
     group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
     parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
     parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
+    parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
     parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
     parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
     group = parser_add.add_mutually_exclusive_group()
@@ -2250,6 +2320,7 @@ def register_commands(subparsers, context):
     parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
     parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
     parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
+    parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe')
     parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
 
     parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index 826a3f955f..d9aca6e2db 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -32,9 +32,11 @@ def _run(cmd, cwd=''):
 
 def _get_srctree(tmpdir):
     srctree = tmpdir
-    dirs = scriptutils.filter_src_subdirs(tmpdir)
+    dirs = os.listdir(tmpdir)
     if len(dirs) == 1:
         srctree = os.path.join(tmpdir, dirs[0])
+    else:
+        raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, dirs))
     return srctree
 
 def _copy_source_code(orig, dest):
@@ -74,21 +76,21 @@ def _rename_recipe_dirs(oldpv, newpv, path):
         bb.utils.rename(os.path.join(path, oldfile),
                         os.path.join(path, newfile))
 
-def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
+def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
     oldrecipe = os.path.basename(oldrecipe)
     if oldrecipe.endswith('_%s.bb' % oldpv):
-        newrecipe = '%s_%s.bb' % (bpn, newpv)
+        newrecipe = '%s_%s.bb' % (pn, newpv)
         if oldrecipe != newrecipe:
             shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
     else:
         newrecipe = oldrecipe
     return os.path.join(path, newrecipe)
 
-def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
+def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
     _rename_recipe_dirs(oldpv, newpv, path)
-    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
+    return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
 
-def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d):
+def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
     """Writes an append file"""
     if not os.path.exists(rc):
         raise DevtoolError("bbappend not created because %s does not exist" % rc)
@@ -104,6 +106,11 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
     af = os.path.join(appendpath, '%s.bbappend' % brf)
     with open(af, 'w') as f:
         f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
+        # Local files can be modified/tracked in separate subdir under srctree
+        # Mostly useful for packages with S != WORKDIR
+        f.write('FILESPATH:prepend := "%s:"\n' %
+                os.path.join(srctreebase, 'oe-local-files'))
+        f.write('# srctreebase: %s\n' % srctreebase)
         f.write('inherit externalsrc\n')
         f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                  'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
@@ -112,27 +119,24 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
         if b_is_s:
             f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
         f.write('\n')
-        if rev:
-            f.write('# initial_rev: %s\n' % rev)
+        if revs:
+            for name, rev in revs.items():
+                f.write('# initial_rev %s: %s\n' % (name, rev))
         if copied:
             f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
             f.write('# original_files: %s\n' % ' '.join(copied))
     return af
 
-def _cleanup_on_error(rf, srctree):
-    rfp = os.path.split(rf)[0] # recipe folder
-    rfpp = os.path.split(rfp)[0] # recipes folder
-    if os.path.exists(rfp):
-        shutil.rmtree(rfp)
-        if not len(os.listdir(rfpp)):
-            os.rmdir(rfpp)
+def _cleanup_on_error(rd, srctree):
+    if os.path.exists(rd):
+        shutil.rmtree(rd)
     srctree = os.path.abspath(srctree)
     if os.path.exists(srctree):
         shutil.rmtree(srctree)
 
-def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None):
-    if rf and not keep_failure:
-        _cleanup_on_error(rf, srctree)
+def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
+    if not keep_failure:
+        _cleanup_on_error(rd, srctree)
     logger.error(e)
     if extramsg:
         logger.error(extramsg)
@@ -165,6 +169,7 @@ def _get_uri(rd):
 
 def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
     """Extract sources of a recipe with a new version"""
+    import oe.patch
 
     def __run(cmd):
         """Simple wrapper which calls _run with srctree as cwd"""
@@ -179,12 +184,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
     uri, rev = _get_uri(crd)
     if srcrev:
         rev = srcrev
+    paths = [srctree]
     if uri.startswith('git://') or uri.startswith('gitsm://'):
         __run('git fetch')
         __run('git checkout %s' % rev)
-        __run('git tag -f devtool-base-new')
-        md5 = None
-        sha256 = None
+        __run('git tag -f --no-sign devtool-base-new')
+        __run('git submodule update --recursive')
+        __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'')
+        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
+        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
+        checksums = {}
         _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
         srcsubdir_rel = params.get('destsuffix', 'git')
         if not srcbranch:
@@ -192,14 +201,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
             get_branch = [x.strip() for x in check_branch.splitlines()]
             # Remove HEAD reference point and drop remote prefix
             get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
-            if 'master' in get_branch:
-                # If it is master, we do not need to append 'branch=master' as this is default.
-                # Even with the case where get_branch has multiple objects, if 'master' is one
-                # of them, we should default take from 'master'
-                srcbranch = ''
-            elif len(get_branch) == 1:
-                # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
+            if len(get_branch) == 1:
+                # If srcrev is on only ONE branch, then use that branch
                 srcbranch = get_branch[0]
+            elif 'main' in get_branch:
+                # If srcrev is on multiple branches, then choose 'main' if it is one of them
+                srcbranch = 'main'
+            elif 'master' in get_branch:
+                # Otherwise choose 'master' if it is one of the branches
+                srcbranch = 'master'
             else:
                 # If get_branch contains more than one object, then display an error and exit.
                 mbrch = '\n  ' + '\n  '.join(get_branch)
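The new selection order is: a single containing branch wins, then 'main', then 'master', and anything else is an error. The same decision table in isolation (the branch lists are illustrative):

    def pick_srcbranch(get_branch):
        # Mirrors the if/elif chain above.
        if len(get_branch) == 1:
            return get_branch[0]
        elif 'main' in get_branch:
            return 'main'
        elif 'master' in get_branch:
            return 'master'
        raise ValueError('srcrev is on multiple branches: %s' % ', '.join(get_branch))

    print(pick_srcbranch(['release-1.2']))            # release-1.2
    print(pick_srcbranch(['main', 'master', 'dev']))  # main
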
@@ -216,9 +226,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
         if ftmpdir and keep_temp:
             logger.info('Fetch temp directory is %s' % ftmpdir)
 
-        md5 = checksums['md5sum']
-        sha256 = checksums['sha256sum']
-
         tmpsrctree = _get_srctree(tmpdir)
         srctree = os.path.abspath(srctree)
         srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)
@@ -250,31 +257,52 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
     useroptions = []
     oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
     __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
-    __run('git tag -f devtool-base-%s' % newpv)
+    __run('git tag -f --no-sign devtool-base-%s' % newpv)
 
-    (stdout, _) = __run('git rev-parse HEAD')
-    rev = stdout.rstrip()
+    revs = {}
+    for path in paths:
+        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
+        revs[os.path.relpath(path, srctree)] = stdout.rstrip()
 
     if no_patch:
         patches = oe.recipeutils.get_recipe_patches(crd)
         if patches:
             logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n  %s' % '\n  '.join([os.path.basename(patch) for patch in patches]))
     else:
-        __run('git checkout devtool-patched -b %s' % branch)
-        (stdout, _) = __run('git branch --list devtool-override-*')
-        branches_to_rebase = [branch] + stdout.split()
-        for b in branches_to_rebase:
-            logger.info("Rebasing {} onto {}".format(b, rev))
-            __run('git checkout %s' % b)
-            try:
-                __run('git rebase %s' % rev)
-            except bb.process.ExecutionError as e:
-                if 'conflict' in e.stdout:
-                    logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
-                    __run('git rebase --abort')
-                else:
-                    logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
-        __run('git checkout %s' % branch)
+        for path in paths:
+            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
+            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
+            branches_to_rebase = [branch] + stdout.split()
+            target_branch = revs[os.path.relpath(path, srctree)]
+
+            # There is a bug (or feature?) in git rebase where if a commit with
+            # a note is fully rebased away by being part of an old commit, the
+            # note is still attached to the old commit. Avoid this by making
+            # sure all old devtool related commits have a note attached to them
+            # (this assumes git config notes.rewriteMode is set to ignore).
+            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+            for rev in stdout.splitlines():
+                if not oe.patch.GitApplyTree.getNotes(path, rev):
+                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")
+
+            for b in branches_to_rebase:
+                logger.info("Rebasing {} onto {}".format(b, target_branch))
+                _run('git checkout %s' % b, cwd=path)
+                try:
+                    _run('git rebase %s' % target_branch, cwd=path)
+                except bb.process.ExecutionError as e:
+                    if 'conflict' in e.stdout:
+                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
+                        _run('git rebase --abort', cwd=path)
+                    else:
+                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+
+            # Remove any dummy notes added above.
+            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+            for rev in stdout.splitlines():
+                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")
+
+            _run('git checkout %s' % branch, cwd=path)
 
     if tmpsrctree:
         if keep_temp:
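The rebase loop now runs once per repository (the top-level source tree plus each submodule) and aborts cleanly on conflicts. A simplified sketch of that control flow using plain subprocess calls; the paths, branch names and notes bookkeeping are omitted or illustrative, not taken from the patch:

    import subprocess

    def rebase_branches(path, branches, target_rev):
        # For each branch: check it out, try the rebase, and abort on conflict
        # so the working tree is left in a usable state.
        for b in branches:
            subprocess.run(['git', 'checkout', b], cwd=path, check=True)
            result = subprocess.run(['git', 'rebase', target_rev], cwd=path,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                    text=True)
            if result.returncode != 0:
                if 'conflict' in result.stdout.lower():
                    subprocess.run(['git', 'rebase', '--abort'], cwd=path, check=True)
                print('Rebase of %s onto %s failed:\n%s' % (b, target_rev, result.stdout))
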
@@ -284,7 +312,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
         if tmpdir != tmpsrctree:
             shutil.rmtree(tmpdir)
 
-    return (rev, md5, sha256, srcbranch, srcsubdir_rel)
+    return (revs, checksums, srcbranch, srcsubdir_rel)
 
 def _add_license_diff_to_recipe(path, diff):
     notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
@@ -305,22 +333,22 @@ def _add_license_diff_to_recipe(path, diff):
         f.write("\n#\n\n".encode())
         f.write(orig_content)
 
-def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
+def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
     """Creates the new recipe under workspace"""
 
-    bpn = rd.getVar('BPN')
-    path = os.path.join(workspace, 'recipes', bpn)
+    pn = rd.getVar('PN')
+    path = os.path.join(workspace, 'recipes', pn)
     bb.utils.mkdirhier(path)
     copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
     if not copied:
-        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
+        raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
     logger.debug('Copied %s to %s' % (copied, path))
 
     oldpv = rd.getVar('PV')
     if not newpv:
         newpv = oldpv
     origpath = rd.getVar('FILE')
-    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
+    fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
     logger.debug('Upgraded %s => %s' % (origpath, fullpath))
 
     newvalues = {}
@@ -336,7 +364,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
     replacing = True
     new_src_uri = []
     for entry in src_uri:
-        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+        try:
+            scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+        except bb.fetch2.MalformedUrl as e:
+            raise DevtoolError("Could not decode SRC_URI: {}".format(e))
         if replacing and scheme in ['git', 'gitsm']:
             branch = params.get('branch', 'master')
             if rd.expand(branch) != srcbranch:
@@ -374,30 +405,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
             addnames.append(params['name'])
     # Find what's been set in the original recipe
     oldnames = []
+    oldsums = []
     noname = False
     for varflag in rd.getVarFlags('SRC_URI'):
-        if varflag.endswith(('.md5sum', '.sha256sum')):
-            name = varflag.rsplit('.', 1)[0]
-            if name not in oldnames:
-                oldnames.append(name)
-        elif varflag in ['md5sum', 'sha256sum']:
-            noname = True
+        for checksum in checksums:
+            if varflag.endswith('.' + checksum):
+                name = varflag.rsplit('.', 1)[0]
+                if name not in oldnames:
+                    oldnames.append(name)
+                oldsums.append(checksum)
+            elif varflag == checksum:
+                noname = True
+                oldsums.append(checksum)
     # Even if SRC_URI has named entries it doesn't have to actually use the name
     if noname and addnames and addnames[0] not in oldnames:
         addnames = []
     # Drop any old names (the name actually might include ${PV})
     for name in oldnames:
         if name not in newnames:
-            newvalues['SRC_URI[%s.md5sum]' % name] = None
-            newvalues['SRC_URI[%s.sha256sum]' % name] = None
+            for checksum in oldsums:
+                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
 
-    if sha256:
-        if addnames:
-            nameprefix = '%s.' % addnames[0]
-        else:
-            nameprefix = ''
+    nameprefix = '%s.' % addnames[0] if addnames else ''
+
+    # md5sum is deprecated, remove any traces of it. If it was the only old
+    # checksum, then replace it with the default checksums.
+    if 'md5sum' in oldsums:
         newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
-        newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256
+        oldsums.remove('md5sum')
+        if not oldsums:
+            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
+
+    for checksum in oldsums:
+        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
 
     if srcsubdir_new != srcsubdir_old:
         s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
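The effect of the md5sum handling is easiest to see on a concrete newvalues dict: any old md5sum entry is cleared, and if md5sum was the only recorded checksum it is replaced by the default list. A sketch, assuming checksums carries a freshly fetched sha256sum (values are illustrative):

    checksums = {'sha256sum': 'deadbeef'}  # illustrative fetched checksums
    oldsums = ['md5sum']                   # what the old recipe used
    nameprefix = ''
    newvalues = {}

    # md5sum is deprecated: remove it, and fall back to the default checksum list.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
        if not oldsums:
            oldsums = ['sha256sum']  # what bb.fetch2.SHOWN_CHECKSUM_LIST currently yields

    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]

    print(newvalues)  # {'SRC_URI[md5sum]': None, 'SRC_URI[sha256sum]': 'deadbeef'}
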
@@ -422,10 +462,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
         newvalues["LIC_FILES_CHKSUM"] = newlicchksum
         _add_license_diff_to_recipe(fullpath, license_diff)
 
+    tinfoil.modified_files()
     try:
         rd = tinfoil.parse_recipe_file(fullpath, False)
     except bb.tinfoil.TinfoilCommandFailed as e:
-        _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed')
+        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
     oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
 
     return fullpath, copied
@@ -434,7 +475,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
 def _check_git_config():
     def getconfig(name):
         try:
-            value = bb.process.run('git config --global %s' % name)[0].strip()
+            value = bb.process.run('git config %s' % name)[0].strip()
         except bb.process.ExecutionError as e:
             if e.exitcode == 1:
                 value = None
@@ -494,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses):
             diff = diff + line
     return diff
 
+def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
+    tasks = []
+    for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
+        logger.info('Running extra recipe upgrade task: %s' % task)
+        res = tinfoil.build_targets(pn, task, handle_events=True)
+
+        if not res:
+            raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
+
 def upgrade(args, config, basepath, workspace):
     """Entry point for the devtool 'upgrade' subcommand"""
 
@@ -521,14 +571,7 @@ def upgrade(args, config, basepath, workspace):
     else:
         srctree = standard.get_default_srctree(config, pn)
 
-    # Check that recipe isn't using a shared workdir
-    s = os.path.abspath(rd.getVar('S'))
-    workdir = os.path.abspath(rd.getVar('WORKDIR'))
-    srctree_s = srctree
-    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
-        # Handle if S is set to a subdirectory of the source
-        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
-        srctree_s = os.path.join(srctree, srcsubdir)
+    srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
 
     # try to automatically discover latest version and revision if not provided on command line
     if not args.version and not args.srcrev:
@@ -561,22 +604,23 @@ def upgrade(args, config, basepath, workspace):
         rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
         old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
         logger.info('Extracting upgraded version source...')
-        rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
+        rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                     args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                     tinfoil, rd)
         new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
         license_diff = _generate_license_diff(old_licenses, new_licenses)
-        rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
-    except bb.process.CmdError as e:
-        _upgrade_error(e, rf, srctree, args.keep_failure)
-    except DevtoolError as e:
-        _upgrade_error(e, rf, srctree, args.keep_failure)
+        rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
+    except (bb.process.CmdError, DevtoolError) as e:
+        recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
+        _upgrade_error(e, recipedir, srctree, args.keep_failure)
     standard._add_md5(config, pn, os.path.dirname(rf))
 
-    af = _write_append(rf, srctree_s, args.same_dir, args.no_same_dir, rev2,
+    af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                        copied, config.workspace_path, rd)
     standard._add_md5(config, pn, af)
 
+    _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)
+
     update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
 
     logger.info('Upgraded source extracted to %s' % srctree)
@@ -611,18 +655,28 @@ def latest_version(args, config, basepath, workspace):
     return 0
 
 def check_upgrade_status(args, config, basepath, workspace):
+    def _print_status(recipe):
+        print("{:25} {:15} {:15} {} {} {}".format(recipe['pn'],
+                                                  recipe['cur_ver'],
+                                                  recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
+                                                  recipe['maintainer'],
+                                                  recipe['revision'] if recipe['revision'] != 'N/A' else "",
+                                                  "cannot be updated due to: %s" % (recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
     if not args.recipe:
         logger.info("Checking the upstream status for all recipes may take a few minutes")
     results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
-    for result in results:
-        # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
-        if args.all or result[1] != 'MATCH':
-            logger.info("{:25} {:15} {:15} {} {} {}".format(result[0],
-                                                            result[2],
-                                                            result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
-                                                            result[4],
-                                                            result[5] if result[5] != 'N/A' else "",
-                                                            "cannot be updated due to: %s" % (result[6]) if result[6] else ""))
+    for recipegroup in results:
+        upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
+        currents = [r for r in recipegroup if r['status'] == 'MATCH']
+        if len(upgrades) > 1:
+            print("These recipes need to be upgraded together {")
+        for r in sorted(upgrades, key=lambda r: r['pn']):
+            _print_status(r)
+        if len(upgrades) > 1:
+            print("}")
+        for r in currents:
+            if args.all:
+                _print_status(r)
 
 def register_commands(subparsers, context):
     """Register devtool subcommands from this plugin"""
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
index 964817766b..bf39f71b11 100644
--- a/scripts/lib/devtool/utilcmds.py
+++ b/scripts/lib/devtool/utilcmds.py
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace):
     b = rd.getVar('B')
     s = rd.getVar('S')
     configurescript = os.path.join(s, 'configure')
-    confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
+    confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd))
     configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
     extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
     extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 88ed8c5f01..041d79f162 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -18,6 +18,7 @@ import shutil
 import scriptutils
 import errno
 from collections import defaultdict
+import difflib
 
 logger = logging.getLogger('recipetool')
 
@@ -100,7 +101,7 @@ def determine_file_source(targetpath, rd):
     import oe.recipeutils
 
     # See if it's in do_install for the recipe
-    workdir = rd.getVar('WORKDIR')
+    unpackdir = rd.getVar('UNPACKDIR')
     src_uri = rd.getVar('SRC_URI')
     srcfile = ''
     modpatches = []
@@ -112,9 +113,9 @@ def determine_file_source(targetpath, rd):
         if not srcpath.startswith('/'):
             # Handle non-absolute path
             srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
-        if srcpath.startswith(workdir):
+        if srcpath.startswith(unpackdir):
             # OK, now we have the source file name, look for it in SRC_URI
-            workdirfile = os.path.relpath(srcpath, workdir)
+            workdirfile = os.path.relpath(srcpath, unpackdir)
             # FIXME this is where we ought to have some code in the fetcher, because this is naive
             for item in src_uri.split():
                 localpath = bb.fetch2.localpath(item, rd)
@@ -299,7 +300,10 @@ def appendfile(args):
         if st.st_mode & stat.S_IXUSR:
             perms = '0755'
         install = {args.newfile: (args.targetpath, perms)}
-        oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine)
+        if sourcepath:
+            sourcepath = os.path.basename(sourcepath)
+        oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname': sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
+        tinfoil.modified_files()
         return 0
     else:
         if alternative_pns:
@@ -313,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None):
     import oe.recipeutils
 
     srcdir = rd.getVar('S')
-    workdir = rd.getVar('WORKDIR')
+    unpackdir = rd.getVar('UNPACKDIR')
 
     import bb.fetch
     simplified = {}
@@ -327,35 +331,57 @@ def appendsrc(args, files, rd, extralines=None):
 
     copyfiles = {}
     extralines = extralines or []
+    params = []
     for newfile, srcfile in files.items():
         src_destdir = os.path.dirname(srcfile)
         if not args.use_workdir:
             if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
-                srcdir = os.path.join(workdir, 'git')
+                srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))
                 if not bb.data.inherits_class('kernel-yocto', rd):
-                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
-            src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
+                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}')
+            src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir)
         src_destdir = os.path.normpath(src_destdir)
 
-        source_uri = 'file://{0}'.format(os.path.basename(srcfile))
         if src_destdir and src_destdir != '.':
-            source_uri += ';subdir={0}'.format(src_destdir)
-
-        simple = bb.fetch.URI(source_uri)
-        simple.params = {}
-        simple_str = str(simple)
-        if simple_str in simplified:
-            existing = simplified[simple_str]
-            if source_uri != existing:
-                logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
-            else:
-                logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
+            params.append({'subdir': src_destdir})
         else:
-            extralines.append('SRC_URI += {0}'.format(source_uri))
-            copyfiles[newfile] = srcfile
-
-    oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines)
+            params.append({})
+
+        copyfiles[newfile] = {'newname': os.path.basename(srcfile)}
+
+    dry_run_output = None
+    dry_run_outdir = None
+    if args.dry_run:
+        import tempfile
+        dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
+        dry_run_outdir = dry_run_output.name
+
+    appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
+                                                   redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
+    if not appendfile:
+        return
+    if args.dry_run:
+        output = ''
+        appendfilename = os.path.basename(appendfile)
+        newappendfile = appendfile
+        if appendfile and os.path.exists(appendfile):
+            with open(appendfile, 'r') as f:
+                oldlines = f.readlines()
+        else:
+            appendfile = '/dev/null'
+            oldlines = []
+
+        with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
+            newlines = f.readlines()
+        diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
+        difflines = list(diff)
+        if difflines:
+            output += ''.join(difflines)
+        if output:
+            logger.info('Diff of changed files:\n%s' % output)
+        else:
+            logger.info('No changed files')
+    tinfoil.modified_files()
 
 def appendsrcfiles(parser, args):
     recipedata = _parse_recipe(args.recipe, tinfoil)
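The dry-run path above writes the would-be bbappend into a temporary directory and then renders a unified diff against the existing file (or /dev/null when there is none). The difflib usage in isolation, with illustrative file contents:

    import difflib

    oldlines = ['SRC_URI += "file://old.patch"\n']   # illustrative current bbappend content
    newlines = ['SRC_URI += "file://old.patch"\n',
                'SRC_URI += "file://new.patch"\n']   # illustrative redirected output

    diff = difflib.unified_diff(oldlines, newlines, 'a/example.bbappend', 'b/example.bbappend')
    print(''.join(diff), end='')
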
@@ -435,6 +461,8 @@ def register_commands(subparsers):
435 help='Create/update a bbappend to add or replace source files', 461 help='Create/update a bbappend to add or replace source files',
436 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.') 462 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
437 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path) 463 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
464 parser.add_argument('-u', '--update-recipe', help='Update the recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
465 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
438 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path) 466 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
439 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True) 467 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
440 468
@@ -442,6 +470,8 @@ def register_commands(subparsers):
442 parents=[common_src], 470 parents=[common_src],
443 help='Create/update a bbappend to add or replace a source file', 471 help='Create/update a bbappend to add or replace a source file',
444 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.') 472 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
473 parser.add_argument('-u', '--update-recipe', help='Update the recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
474 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
445 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path) 475 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
446 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path) 476 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
447 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True) 477 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 4f6e01c639..edb6467103 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,7 @@ from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib 18import hashlib
19import bb.fetch2 19import bb.fetch2
20logger = logging.getLogger('recipetool') 20logger = logging.getLogger('recipetool')
21from oe.license_finder import find_licenses
21 22
22tinfoil = None 23tinfoil = None
23plugins = None 24plugins = None
@@ -389,9 +390,6 @@ def reformat_git_uri(uri):
389 parms.update({('protocol', 'ssh')}) 390 parms.update({('protocol', 'ssh')})
390 elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms): 391 elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms):
391 parms.update({('protocol', scheme)}) 392 parms.update({('protocol', scheme)})
392 # We assume 'master' branch if not set
393 if not 'branch' in parms:
394 parms.update({('branch', 'master')})
395 # Always append 'git://' 393 # Always append 'git://'
396 fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms)) 394 fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
397 return fUrl 395 return fUrl
@@ -426,6 +424,36 @@ def create_recipe(args):
426 storeTagName = '' 424 storeTagName = ''
427 pv_srcpv = False 425 pv_srcpv = False
428 426
427 handled = []
428 classes = []
429
430 # Find all plugins that want to register handlers
431 logger.debug('Loading recipe handlers')
432 raw_handlers = []
433 for plugin in plugins:
434 if hasattr(plugin, 'register_recipe_handlers'):
435 plugin.register_recipe_handlers(raw_handlers)
436 # Sort handlers by priority
437 handlers = []
438 for i, handler in enumerate(raw_handlers):
439 if isinstance(handler, tuple):
440 handlers.append((handler[0], handler[1], i))
441 else:
442 handlers.append((handler, 0, i))
443 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
444 for handler, priority, _ in handlers:
445 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
446 setattr(handler, '_devtool', args.devtool)
447 handlers = [item[0] for item in handlers]
448
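
A note on the relocated handler sort: with reverse=True, higher-priority handlers come first, and because the registration index is negated, handlers sharing a priority keep their registration order. A self-contained illustration (handler names are made up):

    raw_handlers = [('npm', 0), ('cmake', 5), ('autotools', 5), ('setuptools', 10)]

    # Mirror the (handler, priority, index) triples built above
    handlers = [(h, prio, i) for i, (h, prio) in enumerate(raw_handlers)]
    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)

    print([h for h, _, _ in handlers])
    # ['setuptools', 'cmake', 'autotools', 'npm'] -- priority wins, registration order breaks ties
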
449 fetchuri = None
450 for handler in handlers:
451 if hasattr(handler, 'process_url'):
452 ret = handler.process_url(args, classes, handled, extravalues)
453 if 'url' in handled and ret:
454 fetchuri = ret
455 break
456
429 if os.path.isfile(source): 457 if os.path.isfile(source):
430 source = 'file://%s' % os.path.abspath(source) 458 source = 'file://%s' % os.path.abspath(source)
431 459
@@ -434,11 +462,12 @@ def create_recipe(args):
434 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): 462 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
435 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') 463 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
436 # Fetch a URL 464 # Fetch a URL
437 fetchuri = reformat_git_uri(urldefrag(source)[0]) 465 if not fetchuri:
466 fetchuri = reformat_git_uri(urldefrag(source)[0])
438 if args.binary: 467 if args.binary:
439 # Assume the archive contains the directory structure verbatim 468 # Assume the archive contains the directory structure verbatim
440 # so we need to extract to a subdirectory 469 # so we need to extract to a subdirectory
441 fetchuri += ';subdir=${BP}' 470 fetchuri += ';subdir=${BPN}'
442 srcuri = fetchuri 471 srcuri = fetchuri
443 rev_re = re.compile(';rev=([^;]+)') 472 rev_re = re.compile(';rev=([^;]+)')
444 res = rev_re.search(srcuri) 473 res = rev_re.search(srcuri)
@@ -481,6 +510,9 @@ def create_recipe(args):
481 storeTagName = params['tag'] 510 storeTagName = params['tag']
482 params['nobranch'] = '1' 511 params['nobranch'] = '1'
483 del params['tag'] 512 del params['tag']
513 # Assume 'master' branch if not set
514 if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
515 params['branch'] = 'master'
484 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) 516 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
485 517
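
The branch default is now injected at URL level: decode the SRC_URI, edit the parameter dict, re-encode. A sketch of that round-trip with the same bb.fetch2 helpers the code above calls (runnable only inside a BitBake environment; the URL is hypothetical):

    import bb.fetch2

    uri = 'git://git.example.com/project.git;protocol=https'
    scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(uri)

    # Assume 'master' when the fetcher would otherwise have no branch to track
    if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
        params['branch'] = 'master'

    print(bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)))
    # e.g. git://git.example.com/project.git;protocol=https;branch=master
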
486 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') 518 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
@@ -497,7 +529,7 @@ def create_recipe(args):
497 if ftmpdir and args.keep_temp: 529 if ftmpdir and args.keep_temp:
498 logger.info('Fetch temp directory is %s' % ftmpdir) 530 logger.info('Fetch temp directory is %s' % ftmpdir)
499 531
500 dirlist = scriptutils.filter_src_subdirs(srctree) 532 dirlist = os.listdir(srctree)
501 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) 533 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
502 if len(dirlist) == 1: 534 if len(dirlist) == 1:
503 singleitem = os.path.join(srctree, dirlist[0]) 535 singleitem = os.path.join(srctree, dirlist[0])
@@ -530,10 +562,9 @@ def create_recipe(args):
530 # Remove HEAD reference point and drop remote prefix 562 # Remove HEAD reference point and drop remote prefix
531 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 563 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
532 if 'master' in get_branch: 564 if 'master' in get_branch:
533 # If it is master, we do not need to append 'branch=master' as this is default.
534 # Even in the case where get_branch has multiple objects, if 'master' is one 565 # Even in the case where get_branch has multiple objects, if 'master' is one
535 # of them, we should default to taking it from 'master' 566 # of them, we should default to taking it from 'master'
536 srcbranch = '' 567 srcbranch = 'master'
537 elif len(get_branch) == 1: 568 elif len(get_branch) == 1:
538 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' 569 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
539 srcbranch = get_branch[0] 570 srcbranch = get_branch[0]
@@ -546,8 +577,8 @@ def create_recipe(args):
546 # Since we might have a value in srcbranch, we need to 577 # Since we might have a value in srcbranch, we need to
547 # reconstruct the srcuri to include 'branch' in params. 578 # reconstruct the srcuri to include 'branch' in params.
548 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) 579 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
549 if srcbranch: 580 if scheme in ['git', 'gitsm']:
550 params['branch'] = srcbranch 581 params['branch'] = srcbranch or 'master'
551 582
552 if storeTagName and scheme in ['git', 'gitsm']: 583 if storeTagName and scheme in ['git', 'gitsm']:
553 # Check srcrev using tag and check validity of the tag 584 # Check srcrev using tag and check validity of the tag
@@ -606,8 +637,7 @@ def create_recipe(args):
606 splitline = line.split() 637 splitline = line.split()
607 if len(splitline) > 1: 638 if len(splitline) > 1:
608 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 639 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
609 srcuri = reformat_git_uri(splitline[1]) 640 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
610 srcsubdir = 'git'
611 break 641 break
612 642
613 if args.src_subdir: 643 if args.src_subdir:
@@ -639,8 +669,6 @@ def create_recipe(args):
639 # We'll come back and replace this later in handle_license_vars() 669 # We'll come back and replace this later in handle_license_vars()
640 lines_before.append('##LICENSE_PLACEHOLDER##') 670 lines_before.append('##LICENSE_PLACEHOLDER##')
641 671
642 handled = []
643 classes = []
644 672
645 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this 673 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
646 pn = None 674 pn = None
@@ -678,8 +706,10 @@ def create_recipe(args):
678 if not srcuri: 706 if not srcuri:
679 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') 707 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
680 lines_before.append('SRC_URI = "%s"' % srcuri) 708 lines_before.append('SRC_URI = "%s"' % srcuri)
709 shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
681 for key, value in sorted(checksums.items()): 710 for key, value in sorted(checksums.items()):
682 lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) 711 if key in shown_checksums:
712 lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
683 if srcuri and supports_srcrev(srcuri): 713 if srcuri and supports_srcrev(srcuri):
684 lines_before.append('') 714 lines_before.append('')
685 lines_before.append('# Modify these as desired') 715 lines_before.append('# Modify these as desired')
@@ -691,7 +721,7 @@ def create_recipe(args):
691 srcpvprefix = 'svnr' 721 srcpvprefix = 'svnr'
692 else: 722 else:
693 srcpvprefix = scheme 723 srcpvprefix = scheme
694 lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix)) 724 lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
695 pv_srcpv = True 725 pv_srcpv = True
696 if not args.autorev and srcrev == '${AUTOREV}': 726 if not args.autorev and srcrev == '${AUTOREV}':
697 if os.path.exists(os.path.join(srctree, '.git')): 727 if os.path.exists(os.path.join(srctree, '.git')):
@@ -705,7 +735,7 @@ def create_recipe(args):
705 if srcsubdir and not args.binary: 735 if srcsubdir and not args.binary:
706 # (for binary packages we explicitly specify subdir= when fetching to 736 # (for binary packages we explicitly specify subdir= when fetching to
707 # match the default value of S, so we don't need to set it in that case) 737 # match the default value of S, so we don't need to set it in that case)
708 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) 738 lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir)
709 lines_before.append('') 739 lines_before.append('')
710 740
711 if pkgarch: 741 if pkgarch:
@@ -719,25 +749,6 @@ def create_recipe(args):
719 if args.npm_dev: 749 if args.npm_dev:
720 extravalues['NPM_INSTALL_DEV'] = 1 750 extravalues['NPM_INSTALL_DEV'] = 1
721 751
722 # Find all plugins that want to register handlers
723 logger.debug('Loading recipe handlers')
724 raw_handlers = []
725 for plugin in plugins:
726 if hasattr(plugin, 'register_recipe_handlers'):
727 plugin.register_recipe_handlers(raw_handlers)
728 # Sort handlers by priority
729 handlers = []
730 for i, handler in enumerate(raw_handlers):
731 if isinstance(handler, tuple):
732 handlers.append((handler[0], handler[1], i))
733 else:
734 handlers.append((handler, 0, i))
735 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
736 for handler, priority, _ in handlers:
737 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
738 setattr(handler, '_devtool', args.devtool)
739 handlers = [item[0] for item in handlers]
740
741 # Apply the handlers 752 # Apply the handlers
742 if args.binary: 753 if args.binary:
743 classes.append('bin_package') 754 classes.append('bin_package')
@@ -746,6 +757,10 @@ def create_recipe(args):
746 for handler in handlers: 757 for handler in handlers:
747 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) 758 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
748 759
760 # native and nativesdk classes are special and must be inherited last
761 # If present, put them at the end of the classes list
762 classes.sort(key=lambda c: c in ("native", "nativesdk"))
763
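
The one-liner works because Python's sort is stable and False orders before True: entries matching the key move to the end while everything else keeps its relative order. For example:

    classes = ['native', 'pypi', 'setuptools3']
    classes.sort(key=lambda c: c in ("native", "nativesdk"))
    print(classes)  # ['pypi', 'setuptools3', 'native']
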
749 extrafiles = extravalues.pop('extrafiles', {}) 764 extrafiles = extravalues.pop('extrafiles', {})
750 extra_pn = extravalues.pop('PN', None) 765 extra_pn = extravalues.pop('PN', None)
751 extra_pv = extravalues.pop('PV', None) 766 extra_pv = extravalues.pop('PV', None)
@@ -824,7 +839,7 @@ def create_recipe(args):
824 line = line.replace(realpv, '${PV}') 839 line = line.replace(realpv, '${PV}')
825 if pn: 840 if pn:
826 line = line.replace(pn, '${BPN}') 841 line = line.replace(pn, '${BPN}')
827 if line == 'S = "${WORKDIR}/${BPN}-${PV}"': 842 if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line:
828 skipblank = True 843 skipblank = True
829 continue 844 continue
830 elif line.startswith('SRC_URI = '): 845 elif line.startswith('SRC_URI = '):
@@ -870,8 +885,10 @@ def create_recipe(args):
870 outlines.append('') 885 outlines.append('')
871 outlines.extend(lines_after) 886 outlines.extend(lines_after)
872 887
888 outlines = [line.rstrip('\n') + "\n" for line in outlines]
889
873 if extravalues: 890 if extravalues:
874 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False) 891 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
875 892
876 if args.extract_to: 893 if args.extract_to:
877 scriptutils.git_convert_standalone_clone(srctree) 894 scriptutils.git_convert_standalone_clone(srctree)
@@ -887,7 +904,7 @@ def create_recipe(args):
887 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) 904 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
888 905
889 if outfile == '-': 906 if outfile == '-':
890 sys.stdout.write('\n'.join(outlines) + '\n') 907 sys.stdout.write(''.join(outlines) + '\n')
891 else: 908 else:
892 with open(outfile, 'w') as f: 909 with open(outfile, 'w') as f:
893 lastline = None 910 lastline = None
@@ -895,9 +912,10 @@ def create_recipe(args):
895 if not lastline and not line: 912 if not lastline and not line:
896 # Skip extra blank lines 913 # Skip extra blank lines
897 continue 914 continue
898 f.write('%s\n' % line) 915 f.write('%s' % line)
899 lastline = line 916 lastline = line
900 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files()
901 919
902 if tempsrc: 920 if tempsrc:
903 if args.keep_temp: 921 if args.keep_temp:
@@ -920,23 +938,42 @@ def split_value(value):
920 else: 938 else:
921 return value 939 return value
922 940
941def fixup_license(value):
942 # Ensure a license expression containing OR is wrapped in brackets
943 if '|' in value:
944 return '(' + value + ')'
945 return value
946
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
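
Together, fixup_license() and tidy_licenses() normalise a license expression into a sorted, de-duplicated list, keeping OR alternatives bracketed so they survive later '&'-joins. Expected behaviour, assuming oe.license.flattened_licenses resolves '|' through the _choose callback above:

    print(fixup_license('MIT | Apache-2.0'))
    # '(MIT | Apache-2.0)'
    print(tidy_licenses('GPL-2.0-only & MIT & MIT'))
    # ['GPL-2.0-only', 'MIT'] -- duplicates dropped, casefold-sorted
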
923def handle_license_vars(srctree, lines_before, handled, extravalues, d): 957def handle_license_vars(srctree, lines_before, handled, extravalues, d):
924 lichandled = [x for x in handled if x[0] == 'license'] 958 lichandled = [x for x in handled if x[0] == 'license']
925 if lichandled: 959 if lichandled:
926 # Someone else has already handled the license vars, just return their value 960 # Someone else has already handled the license vars, just return their value
927 return lichandled[0][1] 961 return lichandled[0][1]
928 962
929 licvalues = guess_license(srctree, d) 963 licvalues = find_licenses(srctree, d)
930 licenses = [] 964 licenses = []
931 lic_files_chksum = [] 965 lic_files_chksum = []
932 lic_unknown = [] 966 lic_unknown = []
933 lines = [] 967 lines = []
934 if licvalues: 968 if licvalues:
935 for licvalue in licvalues: 969 for licvalue in licvalues:
936 if not licvalue[0] in licenses: 970 license = licvalue[0]
937 licenses.append(licvalue[0]) 971 lics = tidy_licenses(fixup_license(license))
972 lics = [lic for lic in lics if lic not in licenses]
973 if lics:
974 licenses.extend(lics)
938 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) 975 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
939 if licvalue[0] == 'Unknown': 976 if license == 'Unknown':
940 lic_unknown.append(licvalue[1]) 977 lic_unknown.append(licvalue[1])
941 if lic_unknown: 978 if lic_unknown:
942 lines.append('#') 979 lines.append('#')
@@ -945,9 +982,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
945 for licfile in lic_unknown: 982 for licfile in lic_unknown:
946 lines.append('# %s' % licfile) 983 lines.append('# %s' % licfile)
947 984
948 extra_license = split_value(extravalues.pop('LICENSE', [])) 985 extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
949 if '&' in extra_license:
950 extra_license.remove('&')
951 if extra_license: 986 if extra_license:
952 if licenses == ['Unknown']: 987 if licenses == ['Unknown']:
953 licenses = extra_license 988 licenses = extra_license
@@ -988,7 +1023,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
988 lines.append('# instead of &. If there is any doubt, check the accompanying documentation') 1023 lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
989 lines.append('# to determine which situation is applicable.') 1024 lines.append('# to determine which situation is applicable.')
990 1025
991 lines.append('LICENSE = "%s"' % ' & '.join(licenses)) 1026 lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
992 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) 1027 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
993 lines.append('') 1028 lines.append('')
994 1029
@@ -1005,228 +1040,15 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
1005 handled.append(('license', licvalues)) 1040 handled.append(('license', licvalues))
1006 return licvalues 1041 return licvalues
1007 1042
1008def get_license_md5sums(d, static_only=False, linenumbers=False): 1043def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1009 import bb.utils
1010 import csv
1011 md5sums = {}
1012 if not static_only and not linenumbers:
1013 # Gather md5sums of license files in common license dir
1014 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1015 for fn in os.listdir(commonlicdir):
1016 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1017 md5sums[md5value] = fn
1018
1019 # The following were extracted from common values in various recipes
1020 # (double checking the license against the license file itself, not just
1021 # the LICENSE value in the recipe)
1022
1023 # Read license md5sums from csv file
1024 scripts_path = os.path.dirname(os.path.realpath(__file__))
1025 for path in (d.getVar('BBPATH').split(':')
1026 + [os.path.join(scripts_path, '..', '..')]):
1027 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
1028 if os.path.isfile(csv_path):
1029 with open(csv_path, newline='') as csv_file:
1030 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
1031 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
1032 for row in reader:
1033 if linenumbers:
1034 md5sums[row['md5sum']] = (
1035 row['license'], row['beginline'], row['endline'], row['md5'])
1036 else:
1037 md5sums[row['md5sum']] = row['license']
1038
1039 return md5sums
1040
1041def crunch_license(licfile):
1042 '''
1043 Remove non-material text from a license file and then check
1044 its md5sum against a known list. This works well for licenses
1045 which contain a copyright statement, but is also a useful way
1046 to handle people's insistence upon reformatting the license text
1047 slightly (with no material difference to the text of the
1048 license).
1049 '''
1050
1051 import oe.utils
1052
1053 # Note: these are carefully constructed!
1054 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
1055 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1056 copyright_re = re.compile('^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
1057 disclaimer_re = re.compile('^ *\*? ?All [Rr]ights [Rr]eserved\.$')
1058 email_re = re.compile('^.*<[\w\.-]*@[\w\.\-]*>$')
1059 header_re = re.compile('^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
1060 tag_re = re.compile('^ *@?\(?([Ll]icense|MIT)\)?$')
1061 url_re = re.compile('^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
1062
1063 crunched_md5sums = {}
1064
1065 # common licenses
1066 crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0'
1067 crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = 'BSD-0-Clause'
1068 crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause'
1069 crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause'
1070 crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0'
1071 crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0'
1072 crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0'
1073 crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0'
1074 crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only'
1075 crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later'
1076 crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only'
1077 crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later'
1078 crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC'
1079 crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only'
1080 crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later'
1081 crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only'
1082 crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later'
1083 crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only'
1084 crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later'
1085 crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
1086 crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0'
1087 crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense'
1088 crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL'
1089
1090 # The following two were gleaned from the "forever" npm package
1091 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1092 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1093 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1094 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1095 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPLv2'
1096 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1097 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPLv2'
1098 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1099 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPLv2.1'
1100 # unixODBC-2.3.4 COPYING
1101 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPLv2.1'
1102 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1103 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPLv3'
1104 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1105 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1106
1107 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
1108 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
1109 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
1110 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
1111 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
1112 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
1113 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
1114 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
1115 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
1116 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
1117 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
1118 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
1119 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
1120 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
1121 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
1122 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
1123 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
1124 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
1125 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
1126 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
1127 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
1128 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
1129 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
1130 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
1131 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
1132 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
1133 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
1134 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
1135 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
1136 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
1137 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
1138 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
1139 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
1140 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
1141 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
1142 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
1143
1144 lictext = []
1145 with open(licfile, 'r', errors='surrogateescape') as f:
1146 for line in f:
1147 # Drop opening statements
1148 if copyright_re.match(line):
1149 continue
1150 elif disclaimer_re.match(line):
1151 continue
1152 elif email_re.match(line):
1153 continue
1154 elif header_re.match(line):
1155 continue
1156 elif tag_re.match(line):
1157 continue
1158 elif url_re.match(line):
1159 continue
1160 elif license_title_re.match(line):
1161 continue
1162 elif license_statement_re.match(line):
1163 continue
1164 # Strip comment symbols
1165 line = line.replace('*', '') \
1166 .replace('#', '')
1167 # Unify spelling
1168 line = line.replace('sub-license', 'sublicense')
1169 # Squash spaces
1170 line = oe.utils.squashspaces(line.strip())
1171 # Replace smart quotes, double quotes and backticks with single quotes
1172 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1173 # Unify brackets
1174 line = line.replace("{", "[").replace("}", "]")
1175 if line:
1176 lictext.append(line)
1177
1178 m = hashlib.md5()
1179 try:
1180 m.update(' '.join(lictext).encode('utf-8'))
1181 md5val = m.hexdigest()
1182 except UnicodeEncodeError:
1183 md5val = None
1184 lictext = ''
1185 license = crunched_md5sums.get(md5val, None)
1186 return license, md5val, lictext
1187
1188def guess_license(srctree, d):
1189 import bb
1190 md5sums = get_license_md5sums(d)
1191
1192 licenses = []
1193 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1194 skip_extensions = (".html", ".js", ".json", ".svg", ".ts")
1195 licfiles = []
1196 for root, dirs, files in os.walk(srctree):
1197 for fn in files:
1198 if fn.endswith(skip_extensions):
1199 continue
1200 for spec in licspecs:
1201 if fnmatch.fnmatch(fn, spec):
1202 fullpath = os.path.join(root, fn)
1203 if not fullpath in licfiles:
1204 licfiles.append(fullpath)
1205 for licfile in licfiles:
1206 md5value = bb.utils.md5_file(licfile)
1207 license = md5sums.get(md5value, None)
1208 if not license:
1209 license, crunched_md5, lictext = crunch_license(licfile)
1210 if lictext and not license:
1211 license = 'Unknown'
1212 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
1213 "and replace `Unknown` with the license:\n" \
1214 "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
1215 if license:
1216 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1217
1218 # FIXME should we grab at least one source file with a license header and add that too?
1219
1220 return licenses
1221
1222def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='${PN}'):
1223 """ 1044 """
1224 Given a list of (license, path, md5sum) as returned by guess_license(), 1045 Given a list of (license, path, md5sum) as returned by match_licenses(),
1225 a dict of package name to path mappings, write out a set of 1046 a dict of package name to path mappings, write out a set of
1226 package-specific LICENSE values. 1047 package-specific LICENSE values.
1227 """ 1048 """
1228 pkglicenses = {pn: []} 1049 pkglicenses = {pn: []}
1229 for license, licpath, _ in licvalues: 1050 for license, licpath, _ in licvalues:
1051 license = fixup_license(license)
1230 for pkgname, pkgpath in packages.items(): 1052 for pkgname, pkgpath in packages.items():
1231 if licpath.startswith(pkgpath + '/'): 1053 if licpath.startswith(pkgpath + '/'):
1232 if pkgname in pkglicenses: 1054 if pkgname in pkglicenses:
@@ -1239,13 +1061,24 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='
1239 pkglicenses[pn].append(license) 1061 pkglicenses[pn].append(license)
1240 outlicenses = {} 1062 outlicenses = {}
1241 for pkgname in packages: 1063 for pkgname in packages:
1242 license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' 1064 # Assume AND operator between license files
1243 if license == 'Unknown' and pkgname in fallback_licenses: 1065 license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
1066 if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
1244 license = fallback_licenses[pkgname] 1067 license = fallback_licenses[pkgname]
1068 licenses = tidy_licenses(license)
1069 license = ' & '.join(licenses)
1245 outlines.append('LICENSE:%s = "%s"' % (pkgname, license)) 1070 outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
1246 outlicenses[pkgname] = license.split() 1071 outlicenses[pkgname] = licenses
1247 return outlicenses 1072 return outlicenses
1248 1073
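
With the new explicit '&' join, a package that matched several license files gets a conjunctive LICENSE value. A rough usage sketch (package names, paths and checksums are hypothetical; the md5 field is unused here):

    licvalues = [('MIT', 'src/liba/COPYING', 'aaaa'),
                 ('GPL-2.0-only', 'src/tool/LICENSE', 'bbbb')]
    packages = {'liba': 'src/liba', 'tool': 'src/tool'}
    outlines = []
    split_pkg_licenses(licvalues, packages, outlines)
    print(outlines)
    # ['LICENSE:liba = "MIT"', 'LICENSE:tool = "GPL-2.0-only"']
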
1074def generate_common_licenses_chksums(common_licenses, d):
1075 lic_files_chksums = []
1076 for license in tidy_licenses(common_licenses):
1077 licfile = '${COMMON_LICENSE_DIR}/' + license
1078 md5value = bb.utils.md5_file(d.expand(licfile))
1079 lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
1080 return lic_files_chksums
1081
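
For license names that exist under ${COMMON_LICENSE_DIR}, the new helper derives LIC_FILES_CHKSUM entries straight from the common license texts. Illustrative call, assuming d is a BitBake datastore such as tinfoil.config_data (md5 values abbreviated):

    chksums = generate_common_licenses_chksums('MIT & GPL-2.0-only', d)
    # ['file://${COMMON_LICENSE_DIR}/GPL-2.0-only;md5=...',
    #  'file://${COMMON_LICENSE_DIR}/MIT;md5=...']
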
1249def read_pkgconfig_provides(d): 1082def read_pkgconfig_provides(d):
1250 pkgdatadir = d.getVar('PKGDATA_DIR') 1083 pkgdatadir = d.getVar('PKGDATA_DIR')
1251 pkgmap = {} 1084 pkgmap = {}
@@ -1376,7 +1209,7 @@ def register_commands(subparsers):
1376 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') 1209 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
1377 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') 1210 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
1378 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') 1211 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
1212 parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
1379 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1213 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1380 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1214 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1381 parser_create.set_defaults(func=create_recipe) 1215 parser_create.set_defaults(func=create_recipe)
1382
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 5015634476..ec9d510e23 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -5,9 +5,9 @@
5# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
6# 6#
7 7
8import os
8import re 9import re
9import logging 10import logging
10import glob
11from recipetool.create import RecipeHandler, validate_pv 11from recipetool.create import RecipeHandler, validate_pv
12 12
13logger = logging.getLogger('recipetool') 13logger = logging.getLogger('recipetool')
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler):
137 deps = [] 137 deps = []
138 unmappedpkgs = [] 138 unmappedpkgs = []
139 139
140 proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE) 140 proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
141 pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE) 141 pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
142 pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE) 142 pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
143 findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE) 143 findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
144 findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*') 144 findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
145 checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE) 145 checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
146 include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE) 146 include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
147 subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE) 147 subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
148 dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?') 148 dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
149 149
150 def find_cmake_package(pkg): 150 def find_cmake_package(pkg):
151 RecipeHandler.load_devel_filemap(tinfoil.config_data) 151 RecipeHandler.load_devel_filemap(tinfoil.config_data)
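
The only change in these hunks is the r'' prefix: sequences like \s and \( are invalid string escapes, which newer Python versions flag (DeprecationWarning, later SyntaxWarning) before the pattern ever reaches the regex engine. With a raw string the backslash survives intact:

    import re

    proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
    print(proj_re.search('project(foo VERSION 1.2)').group(1))
    # 'foo VERSION 1.2'
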
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler):
423 'makeinfo': 'texinfo', 423 'makeinfo': 'texinfo',
424 } 424 }
425 425
426 pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') 426 pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
427 pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*') 427 pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
428 lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*') 428 lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
429 libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*') 429 libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
430 progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') 430 progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
431 dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?') 431 dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
432 ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*') 432 ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
433 am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*') 433 am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
434 define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)') 434 define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
435 version_re = re.compile('([0-9.]+)') 435 version_re = re.compile(r'([0-9.]+)')
436 436
437 defines = {} 437 defines = {}
438 def subst_defines(value): 438 def subst_defines(value):
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index 0b6b042ed1..a807dafae5 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -8,9 +8,9 @@
8import ast 8import ast
9import codecs 9import codecs
10import collections 10import collections
11import distutils.command.build_py 11import setuptools.command.build_py
12import email 12import email
13import imp 13import importlib
14import glob 14import glob
15import itertools 15import itertools
16import logging 16import logging
@@ -18,7 +18,11 @@ import os
18import re 18import re
19import sys 19import sys
20import subprocess 20import subprocess
21import json
22import urllib.request
21from recipetool.create import RecipeHandler 23from recipetool.create import RecipeHandler
24from urllib.parse import urldefrag
25from recipetool.create import determine_from_url
22 26
23logger = logging.getLogger('recipetool') 27logger = logging.getLogger('recipetool')
24 28
@@ -37,7 +41,334 @@ class PythonRecipeHandler(RecipeHandler):
37 assume_provided = ['builtins', 'os.path'] 41 assume_provided = ['builtins', 'os.path']
38 # Assumes that the host python3 builtin_module_names is sane for target too 42 # Assumes that the host python3 builtin_module_names is sane for target too
39 assume_provided = assume_provided + list(sys.builtin_module_names) 43 assume_provided = assume_provided + list(sys.builtin_module_names)
44 excluded_fields = []
40 45
46
47 classifier_license_map = {
48 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
49 'License :: OSI Approved :: Apache Software License': 'Apache',
50 'License :: OSI Approved :: Apple Public Source License': 'APSL',
51 'License :: OSI Approved :: Artistic License': 'Artistic',
52 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
53 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
54 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
55 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
56 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
57 'License :: OSI Approved :: Common Public License': 'CPL',
58 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
59 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
60 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
61 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
62 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
63 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
64 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
65 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
66 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
67 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
68 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
69 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
70 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
71 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
72 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
73 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
74 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
75 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
76 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
77 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
78 'License :: OSI Approved :: IBM Public License': 'IPL',
79 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
80 'License :: OSI Approved :: Intel Open Source License': 'Intel',
81 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
82 'License :: OSI Approved :: MIT License': 'MIT',
83 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
84 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
85 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
86 'License :: OSI Approved :: Motosoto License': 'Motosoto',
87 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
88 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
89 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
90 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
91 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
92 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
93 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
94 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
95 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
96 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
97 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
98 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
99 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
100 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
101 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
102 'License :: OSI Approved :: Sun Public License': 'SPL',
103 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
104 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
105 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
106 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
107 'License :: OSI Approved :: W3C License': 'W3C',
108 'License :: OSI Approved :: X.Net License': 'Xnet',
109 'License :: OSI Approved :: Zope Public License': 'ZPL',
110 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
111 'License :: Other/Proprietary License': 'Proprietary',
112 'License :: Public Domain': 'PD',
113 }
114
115 def __init__(self):
116 pass
117
118 def process_url(self, args, classes, handled, extravalues):
119 """
120 Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
121 which corresponds to the archive location, and add pypi class
122 """
123
124 if 'url' in handled:
125 return None
126
127 fetch_uri = None
128 source = args.source
129 required_version = args.version if args.version else None
130 match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
131 if match:
132 package = match.group(1)
133 version = match.group(2) if match.group(2) else required_version
134
135 json_url = "https://pypi.org/pypi/%s/json" % package
136 response = urllib.request.urlopen(json_url)
137 if response.status == 200:
138 data = json.loads(response.read())
139 if not version:
140 # grab latest version
141 version = data["info"]["version"]
142 pypi_package = data["info"]["name"]
143 for release in reversed(data["releases"][version]):
144 if release["packagetype"] == "sdist":
145 fetch_uri = release["url"]
146 break
147 else:
148 logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
149 return None
150 else:
151 match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
152 if match:
153 fetch_uri = source
154 pypi_package = match.group(1)
155 _, version = determine_from_url(fetch_uri)
156
157 if match and not args.no_pypi:
158 if required_version and version != required_version:
159 raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
160 # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
161 # but at this point we cannot know, because the user can specify the output name of the recipe on the command line
162 extravalues["PYPI_PACKAGE"] = pypi_package
163 # If the tarball extension is not 'tar.gz' (the default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe
164 pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
165 if pypi_package_ext:
166 pypi_package_ext = pypi_package_ext.group(1)
167 if pypi_package_ext != "tar.gz":
168 extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
169
170 # Pypi class will handle S and SRC_URI variables, so remove them
171 # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
172 # extravalues['SRC_URI(?:\[.*?\])?'] = None
173 extravalues['S'] = None
174 extravalues['SRC_URI'] = None
175
176 classes.append('pypi')
177
178 handled.append('url')
179 return fetch_uri
180
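
process_url() resolves a pypi.org project page to its sdist tarball through the PyPI JSON API. A trimmed standalone sketch of that lookup (package name is arbitrary; requires network access):

    import json
    import re
    import urllib.request

    source = 'https://pypi.org/project/requests/'
    match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', source)
    package, version = match.group(1), match.group(2)

    with urllib.request.urlopen('https://pypi.org/pypi/%s/json' % package) as response:
        data = json.loads(response.read())

    version = version or data['info']['version']
    # Walk the release files newest-first and keep the source distribution
    for release in reversed(data['releases'][version]):
        if release['packagetype'] == 'sdist':
            print(release['url'])
            break
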
181 def handle_classifier_license(self, classifiers, existing_licenses=""):
182
183 licenses = []
184 for classifier in classifiers:
185 if classifier in self.classifier_license_map:
186 license = self.classifier_license_map[classifier]
187 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
188 license = 'Apache-2.0'
189 elif license == 'GPL':
190 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
191 license = 'GPL-2.0'
192 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
193 license = 'GPL-3.0'
194 elif license == 'LGPL':
195 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
196 license = 'LGPL-2.1'
197 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
198 license = 'LGPL-2.0'
199 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
200 license = 'LGPL-3.0'
201 licenses.append(license)
202
203 if licenses:
204 return ' & '.join(licenses)
205
206 return None
207
208 def map_info_to_bbvar(self, info, extravalues):
209
210 # Map PKG-INFO & setup.py fields to bitbake variables
211 for field, values in info.items():
212 if field in self.excluded_fields:
213 continue
214
215 if field not in self.bbvar_map:
216 continue
217
218 if isinstance(values, str):
219 value = values
220 else:
221 value = ' '.join(str(v) for v in values if v)
222
223 bbvar = self.bbvar_map[field]
224 if bbvar == "PN":
225 # by convention python recipes start with "python3-"
226 if not value.startswith('python'):
227 value = 'python3-' + value
228
229 if bbvar not in extravalues and value:
230 extravalues[bbvar] = value
231
232 def apply_info_replacements(self, info):
233 if not self.replacements:
234 return
235
236 for variable, search, replace in self.replacements:
237 if variable not in info:
238 continue
239
240 def replace_value(search, replace, value):
241 if replace is None:
242 if re.search(search, value):
243 return None
244 else:
245 new_value = re.sub(search, replace, value)
246 if value != new_value:
247 return new_value
248 return value
249
250 value = info[variable]
251 if isinstance(value, str):
252 new_value = replace_value(search, replace, value)
253 if new_value is None:
254 del info[variable]
255 elif new_value != value:
256 info[variable] = new_value
257 elif hasattr(value, 'items'):
258 for dkey, dvalue in list(value.items()):
259 new_list = []
260 for pos, a_value in enumerate(dvalue):
261 new_value = replace_value(search, replace, a_value)
262 if new_value is not None and new_value != value:
263 new_list.append(new_value)
264
265 if value != new_list:
266 value[dkey] = new_list
267 else:
268 new_list = []
269 for pos, a_value in enumerate(value):
270 new_value = replace_value(search, replace, a_value)
271 if new_value is not None and new_value != value:
272 new_list.append(new_value)
273
274 if value != new_list:
275 info[variable] = new_list
276
277
278 def scan_python_dependencies(self, paths):
279 deps = set()
280 try:
281 dep_output = self.run_command(['pythondeps', '-d'] + paths)
282 except (OSError, subprocess.CalledProcessError):
283 pass
284 else:
285 for line in dep_output.splitlines():
286 line = line.rstrip()
287 dep, filename = line.split('\t', 1)
288 if filename.endswith('/setup.py'):
289 continue
290 deps.add(dep)
291
292 try:
293 provides_output = self.run_command(['pythondeps', '-p'] + paths)
294 except (OSError, subprocess.CalledProcessError):
295 pass
296 else:
297 provides_lines = (l.rstrip() for l in provides_output.splitlines())
298 provides = set(l for l in provides_lines if l and l != 'setup')
299 deps -= provides
300
301 return deps
302
303 def parse_pkgdata_for_python_packages(self):
304 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
305
306 ldata = tinfoil.config_data.createCopy()
307 bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
308 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
309
310 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
311 python_dirs = [python_sitedir + os.sep,
312 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
313 os.path.dirname(python_sitedir) + os.sep]
314 packages = {}
315 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
316 files_info = None
317 with open(pkgdatafile, 'r') as f:
318 for line in f.readlines():
319 field, value = line.split(': ', 1)
320 if field.startswith('FILES_INFO'):
321 files_info = ast.literal_eval(value)
322 break
323 else:
324 continue
325
326 for fn in files_info:
327 for suffix in importlib.machinery.all_suffixes():
328 if fn.endswith(suffix):
329 break
330 else:
331 continue
332
333 if fn.startswith(dynload_dir + os.sep):
334 if '/.debug/' in fn:
335 continue
336 base = os.path.basename(fn)
337 provided = base.split('.', 1)[0]
338 packages[provided] = os.path.basename(pkgdatafile)
339 continue
340
341 for python_dir in python_dirs:
342 if fn.startswith(python_dir):
343 relpath = fn[len(python_dir):]
344 relstart, _, relremaining = relpath.partition(os.sep)
345 if relstart.endswith('.egg'):
346 relpath = relremaining
347 base, _ = os.path.splitext(relpath)
348
349 if '/.debug/' in base:
350 continue
351 if os.path.basename(base) == '__init__':
352 base = os.path.dirname(base)
353 base = base.replace(os.sep + os.sep, os.sep)
354 provided = base.replace(os.sep, '.')
355 packages[provided] = os.path.basename(pkgdatafile)
356 return packages
357
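
The suffix check above uses the for/else idiom: the else branch runs only when no importable suffix matched, so non-module files are skipped. A minimal illustration:

    import importlib.machinery

    for fn in ['README.md', 'foo/__init__.py', 'ext.cpython-312-x86_64-linux-gnu.so']:
        for suffix in importlib.machinery.all_suffixes():
            if fn.endswith(suffix):
                print('importable:', fn)
                break
        else:
            continue  # not a Python module; skip, as above
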
358 @classmethod
359 def run_command(cls, cmd, **popenargs):
360 if 'stderr' not in popenargs:
361 popenargs['stderr'] = subprocess.STDOUT
362 try:
363 return subprocess.check_output(cmd, **popenargs).decode('utf-8')
364 except OSError as exc:
365 logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc)
366 raise
367 except subprocess.CalledProcessError as exc:
368 logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc.output)
369 raise
370
371class PythonSetupPyRecipeHandler(PythonRecipeHandler):
41 bbvar_map = { 372 bbvar_map = {
42 'Name': 'PN', 373 'Name': 'PN',
43 'Version': 'PV', 374 'Version': 'PV',
@@ -75,6 +406,7 @@ class PythonRecipeHandler(RecipeHandler):
75 'Supported-Platform', 406 'Supported-Platform',
76 ] 407 ]
77 setuparg_multi_line_values = ['Description'] 408 setuparg_multi_line_values = ['Description']
409
78 replacements = [ 410 replacements = [
79 ('License', r' +$', ''), 411 ('License', r' +$', ''),
80 ('License', r'^ +', ''), 412 ('License', r'^ +', ''),
@@ -95,71 +427,161 @@ class PythonRecipeHandler(RecipeHandler):
95 ('Install-requires', r'\[[^\]]+\]$', ''), 427 ('Install-requires', r'\[[^\]]+\]$', ''),
96 ] 428 ]
97 429
98 classifier_license_map = {
99 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
100 'License :: OSI Approved :: Apache Software License': 'Apache',
101 'License :: OSI Approved :: Apple Public Source License': 'APSL',
102 'License :: OSI Approved :: Artistic License': 'Artistic',
103 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
104 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
105 'License :: OSI Approved :: Common Public License': 'CPL',
106 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
107 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
108 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
109 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0+',
110 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0',
111 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
112 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
113 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0',
114 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0+',
115 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0',
116 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0+',
117 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0',
118 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0+',
119 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0',
120 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0+',
121 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
122 'License :: OSI Approved :: IBM Public License': 'IPL',
123 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
124 'License :: OSI Approved :: Intel Open Source License': 'Intel',
125 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
126 'License :: OSI Approved :: MIT License': 'MIT',
127 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
128 'License :: OSI Approved :: Motosoto License': 'Motosoto',
129 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
130 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
131 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
132 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
133 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
134 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
135 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
136 'License :: OSI Approved :: Python Software Foundation License': 'PSF',
137 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
138 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
139 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
140 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': '-- Sun Industry Standards Source License (SISSL)',
141 'License :: OSI Approved :: Sun Public License': 'SPL',
142 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
143 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
144 'License :: OSI Approved :: W3C License': 'W3C',
145 'License :: OSI Approved :: X.Net License': 'Xnet',
146 'License :: OSI Approved :: Zope Public License': 'ZPL',
147 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
148 }
149
150 def __init__(self): 430 def __init__(self):
151 pass 431 pass
152 432
433 def parse_setup_py(self, setupscript='./setup.py'):
434 with codecs.open(setupscript) as f:
435 info, imported_modules, non_literals, extensions = gather_setup_info(f)
436
437 def _map(key):
438 key = key.replace('_', '-')
439 key = key[0].upper() + key[1:]
440 if key in self.setup_parse_map:
441 key = self.setup_parse_map[key]
442 return key
443
444 # Naive mapping of setup() arguments to PKG-INFO field names
445 for d in [info, non_literals]:
446 for key, value in list(d.items()):
447 if key is None:
448 continue
449 new_key = _map(key)
450 if new_key != key:
451 del d[key]
452 d[new_key] = value
453
454 return info, 'setuptools' in imported_modules, non_literals, extensions
455
456 def get_setup_args_info(self, setupscript='./setup.py'):
457 cmd = ['python3', setupscript]
458 info = {}
459 keys = set(self.bbvar_map.keys())
460 keys |= set(self.setuparg_list_fields)
461 keys |= set(self.setuparg_multi_line_values)
462 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
463 for index, keys in grouped_keys:
464 if index == (True, False):
465 # Splitlines output for each arg as a list value
466 for key in keys:
467 arg = self.setuparg_map.get(key, key.lower())
468 try:
469 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
470 except (OSError, subprocess.CalledProcessError):
471 pass
472 else:
473 info[key] = [l.rstrip() for l in arg_info.splitlines()]
474 elif index == (False, True):
475 # Entire output for each arg
476 for key in keys:
477 arg = self.setuparg_map.get(key, key.lower())
478 try:
479 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
480 except (OSError, subprocess.CalledProcessError):
481 pass
482 else:
483 info[key] = arg_info
484 else:
485 info.update(self.get_setup_byline(list(keys), setupscript))
486 return info
487
488 def get_setup_byline(self, fields, setupscript='./setup.py'):
489 info = {}
490
491 cmd = ['python3', setupscript]
492 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
493 try:
494 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
495 except (OSError, subprocess.CalledProcessError):
496 pass
497 else:
498 if len(fields) != len(info_lines):
499 logger.error('Mismatch between setup.py output lines and number of fields')
500 sys.exit(1)
501
502 for lineno, line in enumerate(info_lines):
503 line = line.rstrip()
504 info[fields[lineno]] = line
505 return info
506
507 def get_pkginfo(self, pkginfo_fn):
508 msg = email.message_from_file(open(pkginfo_fn, 'r'))
509 msginfo = {}
510 for field in msg.keys():
511 values = msg.get_all(field)
512 if len(values) == 1:
513 msginfo[field] = values[0]
514 else:
515 msginfo[field] = values
516 return msginfo
517
518 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
519 if 'Package-dir' in setup_info:
520 package_dir = setup_info['Package-dir']
521 else:
522 package_dir = {}
523
524 dist = setuptools.Distribution()
525
526 class PackageDir(setuptools.command.build_py.build_py):
527 def __init__(self, package_dir):
528 self.package_dir = package_dir
529 self.dist = dist
530 super().__init__(self.dist)
531
532 pd = PackageDir(package_dir)
533 to_scan = []
534 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
535 if 'Py-modules' in setup_info:
536 for module in setup_info['Py-modules']:
537 try:
538 package, module = module.rsplit('.', 1)
539 except ValueError:
540 package, module = '.', module
541 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
542 to_scan.append(module_path)
543
544 if 'Packages' in setup_info:
545 for package in setup_info['Packages']:
546 to_scan.append(pd.get_package_dir(package))
547
548 if 'Scripts' in setup_info:
549 to_scan.extend(setup_info['Scripts'])
550 else:
551 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
552
553 if not to_scan:
554 to_scan = ['.']
555
556 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
557
558 provided_packages = self.parse_pkgdata_for_python_packages()
559 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
560 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
561 for dep in scanned_deps:
562 mapped = provided_packages.get(dep)
563 if mapped:
564 logger.debug('Mapped %s to %s' % (dep, mapped))
565 mapped_deps.add(mapped)
566 else:
567 logger.debug('Could not map %s' % dep)
568 unmapped_deps.add(dep)
569 return mapped_deps, unmapped_deps
570
153 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 571 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
572
154 if 'buildsystem' in handled: 573 if 'buildsystem' in handled:
155 return False 574 return False
156 575
576 logger.debug("Trying setup.py parser")
577
157 # Check for non-zero size setup.py files 578 # Check for non-zero size setup.py files
158 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) 579 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
159 for fn in setupfiles: 580 for fn in setupfiles:
160 if os.path.getsize(fn): 581 if os.path.getsize(fn):
161 break 582 break
162 else: 583 else:
584 logger.debug("No setup.py found")
163 return False 585 return False
164 586
165 # setup.py is always parsed to get at certain required information, such as 587 # setup.py is always parsed to get at certain required information, such as
@@ -193,6 +615,18 @@ class PythonRecipeHandler(RecipeHandler):
193 continue 615 continue
194 616
195 if line.startswith('['): 617 if line.startswith('['):
618 # PACKAGECONFIG must not contain expressions or whitespace
619 line = line.replace(" ", "")
620 line = line.replace(':', "")
621 line = line.replace('.', "-dot-")
622 line = line.replace('"', "")
623 line = line.replace('<', "-smaller-")
624 line = line.replace('>', "-bigger-")
625 line = line.replace('_', "-")
626 line = line.replace('(', "")
627 line = line.replace(')', "")
628 line = line.replace('!', "-not-")
629 line = line.replace('=', "-equals-")
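 # Illustrative (not part of the original patch): a hypothetical marker
 # such as [test:python_version<"3.8"] comes out of the replacement chain
 # above as [testpython-version-smaller-3-dot-8], i.e. a plain token that
 # is safe to use as a PACKAGECONFIG flag name.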
196 current_feature = line[1:-1] 630 current_feature = line[1:-1]
197 elif current_feature: 631 elif current_feature:
198 extras_req[current_feature].append(line) 632 extras_req[current_feature].append(line)
@@ -226,51 +660,16 @@ class PythonRecipeHandler(RecipeHandler):
226 660
227 if license_str: 661 if license_str:
228 for i, line in enumerate(lines_before): 662 for i, line in enumerate(lines_before):
229 if line.startswith('LICENSE = '): 663 if line.startswith('##LICENSE_PLACEHOLDER##'):
230 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) 664 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
231 break 665 break
232 666
233 if 'Classifier' in info: 667 if 'Classifier' in info:
234 existing_licenses = info.get('License', '') 668 license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
235 licenses = [] 669 if license:
236 for classifier in info['Classifier']: 670 info['License'] = license
237 if classifier in self.classifier_license_map:
238 license = self.classifier_license_map[classifier]
239 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
240 license = 'Apache-2.0'
241 elif license == 'GPL':
242 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
243 license = 'GPL-2.0'
244 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
245 license = 'GPL-3.0'
246 elif license == 'LGPL':
247 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
248 license = 'LGPL-2.1'
249 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
250 license = 'LGPL-2.0'
251 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
252 license = 'LGPL-3.0'
253 licenses.append(license)
254
255 if licenses:
256 info['License'] = ' & '.join(licenses)
257 671
258 # Map PKG-INFO & setup.py fields to bitbake variables 672 self.map_info_to_bbvar(info, extravalues)
259 for field, values in info.items():
260 if field in self.excluded_fields:
261 continue
262
263 if field not in self.bbvar_map:
264 continue
265
266 if isinstance(values, str):
267 value = values
268 else:
269 value = ' '.join(str(v) for v in values if v)
270
271 bbvar = self.bbvar_map[field]
272 if bbvar not in extravalues and value:
273 extravalues[bbvar] = value
274 673
275 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) 674 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
276 675
@@ -281,6 +680,7 @@ class PythonRecipeHandler(RecipeHandler):
281 lines_after.append('# The following configs & dependencies are from setuptools extras_require.') 680 lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
282 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') 681 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
283 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') 682 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
 683 lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.')
284 lines_after.append('#') 684 lines_after.append('#')
285 lines_after.append('# Uncomment this line to enable all the optional features.') 685 lines_after.append('# Uncomment this line to enable all the optional features.')
286 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) 686 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
@@ -326,275 +726,283 @@ class PythonRecipeHandler(RecipeHandler):
326 726
327 handled.append('buildsystem') 727 handled.append('buildsystem')
328 728
329 def get_pkginfo(self, pkginfo_fn): 729class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
330 msg = email.message_from_file(open(pkginfo_fn, 'r')) 730 """Base class to support PEP517 and PEP518
331 msginfo = {} 731
332 for field in msg.keys(): 732 PEP517 https://peps.python.org/pep-0517/#source-trees
333 values = msg.get_all(field) 733 PEP518 https://peps.python.org/pep-0518/#build-system-table
334 if len(values) == 1: 734 """
 335 msginfo[field] = values[0] 735 # bitbake currently supports the following backends
336 else: 736 build_backend_map = {
337 msginfo[field] = values 737 "setuptools.build_meta": "python_setuptools_build_meta",
338 return msginfo 738 "poetry.core.masonry.api": "python_poetry_core",
739 "flit_core.buildapi": "python_flit_core",
740 "hatchling.build": "python_hatchling",
741 "maturin": "python_maturin",
742 "mesonpy": "python_mesonpy",
743 }
339 744
340 def parse_setup_py(self, setupscript='./setup.py'): 745 # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
341 with codecs.open(setupscript) as f: 746 # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
342 info, imported_modules, non_literals, extensions = gather_setup_info(f) 747 # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
 748 # keys from the "project" and "tool.poetry" sections are almost the same, except for HOMEPAGE, which is "homepage" for tool.poetry
 749 # and "Homepage" for the "project" section, so we keep both
750 bbvar_map = {
751 "name": "PN",
752 "version": "PV",
753 "Homepage": "HOMEPAGE",
754 "homepage": "HOMEPAGE",
755 "description": "SUMMARY",
756 "license": "LICENSE",
757 "dependencies": "RDEPENDS:${PN}",
758 "requires": "DEPENDS",
759 }
343 760
344 def _map(key): 761 replacements = [
345 key = key.replace('_', '-') 762 ("license", r" +$", ""),
346 key = key[0].upper() + key[1:] 763 ("license", r"^ +", ""),
347 if key in self.setup_parse_map: 764 ("license", r" ", "-"),
348 key = self.setup_parse_map[key] 765 ("license", r"^GNU-", ""),
349 return key 766 ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
767 ("license", r"^UNKNOWN$", ""),
768 # Remove currently unhandled version numbers from these variables
769 ("requires", r"\[[^\]]+\]$", ""),
770 ("requires", r"^([^><= ]+).*", r"\1"),
771 ("dependencies", r"\[[^\]]+\]$", ""),
772 ("dependencies", r"^([^><= ]+).*", r"\1"),
773 ]
350 774
351 # Naive mapping of setup() arguments to PKG-INFO field names 775 excluded_native_pkgdeps = [
352 for d in [info, non_literals]: 776 # already provided by python_setuptools_build_meta.bbclass
353 for key, value in list(d.items()): 777 "python3-setuptools-native",
354 if key is None: 778 "python3-wheel-native",
355 continue 779 # already provided by python_poetry_core.bbclass
356 new_key = _map(key) 780 "python3-poetry-core-native",
357 if new_key != key: 781 # already provided by python_flit_core.bbclass
358 del d[key] 782 "python3-flit-core-native",
359 d[new_key] = value 783 # already provided by python_mesonpy
784 "python3-meson-python-native",
785 ]
360 786
361 return info, 'setuptools' in imported_modules, non_literals, extensions 787 # add here a list of known and often used packages and the corresponding bitbake package
788 known_deps_map = {
789 "setuptools": "python3-setuptools",
790 "wheel": "python3-wheel",
791 "poetry-core": "python3-poetry-core",
792 "flit_core": "python3-flit-core",
793 "setuptools-scm": "python3-setuptools-scm",
794 "hatchling": "python3-hatchling",
795 "hatch-vcs": "python3-hatch-vcs",
796 "meson-python" : "python3-meson-python",
797 }
362 798
363 def get_setup_args_info(self, setupscript='./setup.py'): 799 def __init__(self):
364 cmd = ['python3', setupscript] 800 pass
365 info = {}
366 keys = set(self.bbvar_map.keys())
367 keys |= set(self.setuparg_list_fields)
368 keys |= set(self.setuparg_multi_line_values)
369 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
370 for index, keys in grouped_keys:
371 if index == (True, False):
372 # Splitlines output for each arg as a list value
373 for key in keys:
374 arg = self.setuparg_map.get(key, key.lower())
375 try:
376 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
377 except (OSError, subprocess.CalledProcessError):
378 pass
379 else:
380 info[key] = [l.rstrip() for l in arg_info.splitlines()]
381 elif index == (False, True):
382 # Entire output for each arg
383 for key in keys:
384 arg = self.setuparg_map.get(key, key.lower())
385 try:
386 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
387 except (OSError, subprocess.CalledProcessError):
388 pass
389 else:
390 info[key] = arg_info
391 else:
392 info.update(self.get_setup_byline(list(keys), setupscript))
393 return info
394 801
395 def get_setup_byline(self, fields, setupscript='./setup.py'): 802 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
396 info = {} 803 info = {}
804 metadata = {}
397 805
398 cmd = ['python3', setupscript] 806 if 'buildsystem' in handled:
399 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) 807 return False
400 try:
401 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
402 except (OSError, subprocess.CalledProcessError):
403 pass
404 else:
405 if len(fields) != len(info_lines):
406 logger.error('Mismatch between setup.py output lines and number of fields')
407 sys.exit(1)
408
409 for lineno, line in enumerate(info_lines):
410 line = line.rstrip()
411 info[fields[lineno]] = line
412 return info
413
414 def apply_info_replacements(self, info):
415 for variable, search, replace in self.replacements:
416 if variable not in info:
417 continue
418
419 def replace_value(search, replace, value):
420 if replace is None:
421 if re.search(search, value):
422 return None
423 else:
424 new_value = re.sub(search, replace, value)
425 if value != new_value:
426 return new_value
427 return value
428
429 value = info[variable]
430 if isinstance(value, str):
431 new_value = replace_value(search, replace, value)
432 if new_value is None:
433 del info[variable]
434 elif new_value != value:
435 info[variable] = new_value
436 elif hasattr(value, 'items'):
437 for dkey, dvalue in list(value.items()):
438 new_list = []
439 for pos, a_value in enumerate(dvalue):
440 new_value = replace_value(search, replace, a_value)
441 if new_value is not None and new_value != value:
442 new_list.append(new_value)
443
444 if value != new_list:
445 value[dkey] = new_list
446 else:
447 new_list = []
448 for pos, a_value in enumerate(value):
449 new_value = replace_value(search, replace, a_value)
450 if new_value is not None and new_value != value:
451 new_list.append(new_value)
452
453 if value != new_list:
454 info[variable] = new_list
455
456 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
457 if 'Package-dir' in setup_info:
458 package_dir = setup_info['Package-dir']
459 else:
460 package_dir = {}
461
462 class PackageDir(distutils.command.build_py.build_py):
463 def __init__(self, package_dir):
464 self.package_dir = package_dir
465
466 pd = PackageDir(package_dir)
467 to_scan = []
468 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
469 if 'Py-modules' in setup_info:
470 for module in setup_info['Py-modules']:
471 try:
472 package, module = module.rsplit('.', 1)
473 except ValueError:
474 package, module = '.', module
475 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
476 to_scan.append(module_path)
477 808
478 if 'Packages' in setup_info: 809 logger.debug("Trying pyproject.toml parser")
479 for package in setup_info['Packages']:
480 to_scan.append(pd.get_package_dir(package))
481 810
 482 if 'Scripts' in setup_info: 811 # Check for a non-zero size pyproject.toml file
483 to_scan.extend(setup_info['Scripts']) 812 setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
813 for fn in setupfiles:
814 if os.path.getsize(fn):
815 break
484 else: 816 else:
485 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") 817 logger.debug("No pyproject.toml found")
486 818 return False
487 if not to_scan:
488 to_scan = ['.']
489
490 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
491 819
492 provided_packages = self.parse_pkgdata_for_python_packages() 820 setupscript = os.path.join(srctree, "pyproject.toml")
493 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
494 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
495 for dep in scanned_deps:
496 mapped = provided_packages.get(dep)
497 if mapped:
498 logger.debug('Mapped %s to %s' % (dep, mapped))
499 mapped_deps.add(mapped)
500 else:
501 logger.debug('Could not map %s' % dep)
502 unmapped_deps.add(dep)
503 return mapped_deps, unmapped_deps
504 821
505 def scan_python_dependencies(self, paths):
506 deps = set()
507 try: 822 try:
508 dep_output = self.run_command(['pythondeps', '-d'] + paths) 823 try:
509 except (OSError, subprocess.CalledProcessError): 824 import tomllib
510 pass 825 except ImportError:
511 else: 826 try:
512 for line in dep_output.splitlines(): 827 import tomli as tomllib
513 line = line.rstrip() 828 except ImportError:
514 dep, filename = line.split('\t', 1) 829 logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
515 if filename.endswith('/setup.py'): 830 return False
516 continue 831
517 deps.add(dep) 832 try:
833 with open(setupscript, "rb") as f:
834 config = tomllib.load(f)
835 except Exception:
836 logger.exception("Failed to parse pyproject.toml")
837 return False
838
839 build_backend = config["build-system"]["build-backend"]
840 if build_backend in self.build_backend_map:
841 classes.append(self.build_backend_map[build_backend])
842 else:
843 logger.error(
844 "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
845 % build_backend
846 )
847 return False
518 848
519 try: 849 licfile = ""
520 provides_output = self.run_command(['pythondeps', '-p'] + paths)
521 except (OSError, subprocess.CalledProcessError):
522 pass
523 else:
524 provides_lines = (l.rstrip() for l in provides_output.splitlines())
525 provides = set(l for l in provides_lines if l and l != 'setup')
526 deps -= provides
527 850
528 return deps 851 if build_backend == "poetry.core.masonry.api":
852 if "tool" in config and "poetry" in config["tool"]:
853 metadata = config["tool"]["poetry"]
854 else:
855 if "project" in config:
856 metadata = config["project"]
857
858 if metadata:
859 for field, values in metadata.items():
860 if field == "license":
 861 # For setuptools.build_meta and flit, license is a table
 862 # but for poetry license is a string
863 # for hatchling, both table (jsonschema) and string (iniconfig) have been used
864 if build_backend == "poetry.core.masonry.api":
865 value = values
866 else:
867 value = values.get("text", "")
868 if not value:
869 licfile = values.get("file", "")
870 continue
871 elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
872 # For poetry backend, "dependencies" section looks like:
873 # [tool.poetry.dependencies]
874 # requests = "^2.13.0"
875 # requests = { version = "^2.13.0", source = "private" }
876 # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
 877 # This class doesn't handle versions anyway, so we just get the dependency names here and construct a list
878 value = []
879 for k in values.keys():
880 value.append(k)
881 elif isinstance(values, dict):
882 for k, v in values.items():
883 info[k] = v
884 continue
885 else:
886 value = values
529 887
530 def parse_pkgdata_for_python_packages(self): 888 info[field] = value
531 suffixes = [t[0] for t in imp.get_suffixes()]
532 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
533 889
534 ldata = tinfoil.config_data.createCopy() 890 # Grab the license value before applying replacements
535 bb.parse.handle('classes/python3-dir.bbclass', ldata, True) 891 license_str = info.get("license", "").strip()
536 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
537 892
538 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') 893 if license_str:
539 python_dirs = [python_sitedir + os.sep, 894 for i, line in enumerate(lines_before):
540 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, 895 if line.startswith("##LICENSE_PLACEHOLDER##"):
541 os.path.dirname(python_sitedir) + os.sep] 896 lines_before.insert(
542 packages = {} 897 i, "# NOTE: License in pyproject.toml is: %s" % license_str
543 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): 898 )
544 files_info = None
545 with open(pkgdatafile, 'r') as f:
546 for line in f.readlines():
547 field, value = line.split(': ', 1)
548 if field.startswith('FILES_INFO'):
549 files_info = ast.literal_eval(value)
550 break 899 break
551 else:
552 continue
553 900
554 for fn in files_info: 901 info["requires"] = config["build-system"]["requires"]
555 for suffix in suffixes: 902
556 if fn.endswith(suffix): 903 self.apply_info_replacements(info)
557 break 904
558 else: 905 if "classifiers" in info:
559 continue 906 license = self.handle_classifier_license(
907 info["classifiers"], info.get("license", "")
908 )
909 if license:
910 if licfile:
911 lines = []
912 md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
913 lines.append('LICENSE = "%s"' % license)
914 lines.append(
915 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
916 % (licfile, md5value)
917 )
918 lines.append("")
919
920 # Replace the placeholder so we get the values in the right place in the recipe file
921 try:
922 pos = lines_before.index("##LICENSE_PLACEHOLDER##")
923 except ValueError:
924 pos = -1
925 if pos == -1:
926 lines_before.extend(lines)
927 else:
928 lines_before[pos : pos + 1] = lines
560 929
561 if fn.startswith(dynload_dir + os.sep): 930 handled.append(("license", [license, licfile, md5value]))
562 if '/.debug/' in fn: 931 else:
563 continue 932 info["license"] = license
564 base = os.path.basename(fn)
565 provided = base.split('.', 1)[0]
566 packages[provided] = os.path.basename(pkgdatafile)
567 continue
568 933
569 for python_dir in python_dirs: 934 provided_packages = self.parse_pkgdata_for_python_packages()
570 if fn.startswith(python_dir): 935 provided_packages.update(self.known_deps_map)
571 relpath = fn[len(python_dir):] 936 native_mapped_deps, native_unmapped_deps = set(), set()
572 relstart, _, relremaining = relpath.partition(os.sep) 937 mapped_deps, unmapped_deps = set(), set()
573 if relstart.endswith('.egg'):
574 relpath = relremaining
575 base, _ = os.path.splitext(relpath)
576 938
577 if '/.debug/' in base: 939 if "requires" in info:
578 continue 940 for require in info["requires"]:
579 if os.path.basename(base) == '__init__': 941 mapped = provided_packages.get(require)
580 base = os.path.dirname(base)
581 base = base.replace(os.sep + os.sep, os.sep)
582 provided = base.replace(os.sep, '.')
583 packages[provided] = os.path.basename(pkgdatafile)
584 return packages
585 942
586 @classmethod 943 if mapped:
587 def run_command(cls, cmd, **popenargs): 944 logger.debug("Mapped %s to %s" % (require, mapped))
588 if 'stderr' not in popenargs: 945 native_mapped_deps.add(mapped)
589 popenargs['stderr'] = subprocess.STDOUT 946 else:
590 try: 947 logger.debug("Could not map %s" % require)
591 return subprocess.check_output(cmd, **popenargs).decode('utf-8') 948 native_unmapped_deps.add(require)
592 except OSError as exc: 949
593 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) 950 info.pop("requires")
594 raise 951
595 except subprocess.CalledProcessError as exc: 952 if native_mapped_deps != set():
596 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) 953 native_mapped_deps = {
597 raise 954 item + "-native" for item in native_mapped_deps
955 }
956 native_mapped_deps -= set(self.excluded_native_pkgdeps)
957 if native_mapped_deps != set():
958 info["requires"] = " ".join(sorted(native_mapped_deps))
959
960 if native_unmapped_deps:
961 lines_after.append("")
962 lines_after.append(
963 "# WARNING: We were unable to map the following python package/module"
964 )
965 lines_after.append(
966 "# dependencies to the bitbake packages which include them:"
967 )
968 lines_after.extend(
969 "# {}".format(d) for d in sorted(native_unmapped_deps)
970 )
971
972 if "dependencies" in info:
973 for dependency in info["dependencies"]:
974 mapped = provided_packages.get(dependency)
975 if mapped:
976 logger.debug("Mapped %s to %s" % (dependency, mapped))
977 mapped_deps.add(mapped)
978 else:
979 logger.debug("Could not map %s" % dependency)
980 unmapped_deps.add(dependency)
981
982 info.pop("dependencies")
983
 984 if mapped_deps:
 985 info["dependencies"] = " ".join(sorted(mapped_deps))
987
988 if unmapped_deps:
989 lines_after.append("")
990 lines_after.append(
991 "# WARNING: We were unable to map the following python package/module"
992 )
993 lines_after.append(
994 "# runtime dependencies to the bitbake packages which include them:"
995 )
996 lines_after.extend(
997 "# {}".format(d) for d in sorted(unmapped_deps)
998 )
999
1000 self.map_info_to_bbvar(info, extravalues)
1001
1002 handled.append("buildsystem")
1003 except Exception:
1004 logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
1005 return False
598 1006
599 1007
600def gather_setup_info(fileobj): 1008def gather_setup_info(fileobj):
@@ -710,5 +1118,7 @@ def has_non_literals(value):
710 1118
711 1119
712def register_recipe_handlers(handlers): 1120def register_recipe_handlers(handlers):
713 # We need to make sure this is ahead of the makefile fallback handler 1121 # We need to make sure these are ahead of the makefile fallback handler
714 handlers.append((PythonRecipeHandler(), 70)) 1122 # and the pyproject.toml handler ahead of the setup.py handler
1123 handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
1124 handlers.append((PythonSetupPyRecipeHandler(), 70))
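For reference, the essence of the new pyproject.toml handling above can be sketched standalone (hypothetical code, not part of the patch; it assumes Python 3.11+ so that tomllib is available, and it borrows the build_backend_map idea from the handler):

    import tomllib

    # Subset of the backend -> bbclass mapping used by the handler above.
    BACKEND_TO_BBCLASS = {
        "setuptools.build_meta": "python_setuptools_build_meta",
        "poetry.core.masonry.api": "python_poetry_core",
        "flit_core.buildapi": "python_flit_core",
    }

    def backend_bbclass(pyproject_path):
        # Read the PEP 518 build-system table and map its backend to a class.
        with open(pyproject_path, "rb") as f:
            config = tomllib.load(f)
        backend = config["build-system"]["build-backend"]
        # None means: unsupported backend, fall back to the setup.py handler.
        return BACKEND_TO_BBCLASS.get(backend)
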
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
new file mode 100644
index 0000000000..5cc53931f0
--- /dev/null
+++ b/scripts/lib/recipetool/create_go.py
@@ -0,0 +1,777 @@
1# Recipe creation tool - go support plugin
2#
3# The code is based on golang internals. See the affected
4# methods for further reference and information.
5#
6# Copyright (C) 2023 Weidmueller GmbH & Co KG
7# Author: Lukas Funke <lukas.funke@weidmueller.com>
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import find_licenses, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError, HTTPError
20
21import bb.utils
22import json
23import logging
24import os
25import re
26import subprocess
27import sys
28import shutil
29import tempfile
30import urllib.parse
31import urllib.request
32
33
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38
39tinfoil = None
40
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
49
50def tinfoil_init(instance):
51 global tinfoil
52 tinfoil = instance
53
54
55class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation"""
57
58 @staticmethod
59 def __ensure_go():
60 """Check if the 'go' command is available in the recipes"""
61 recipe = "go-native"
62 if not tinfoil.recipes_parsed:
63 tinfoil.parse_recipes()
64 try:
65 rd = tinfoil.parse_recipe(recipe)
66 except bb.providers.NoProvider:
67 bb.error(
68 "Nothing provides '%s' which is required for the build" % (recipe))
69 bb.note(
70 "You will likely need to add a layer that provides '%s'" % (recipe))
71 return None
72
73 bindir = rd.getVar('STAGING_BINDIR_NATIVE')
74 gopath = os.path.join(bindir, 'go')
75
76 if not os.path.exists(gopath):
77 tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
78
79 if not os.path.exists(gopath):
80 logger.error(
 81 '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
82 return None
83
84 return bindir
85
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
 99 # Some module paths are just redirects to github (or some other
 100 # vcs host). Therefore, we check whether this module path redirects
 101 # somewhere else
102 if resp.geturl() != url.geturl():
 103 bb.debug(1, "%s is redirected to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
 208 vcstype = m.group('vcs')
 209 repourl = m.group('repo')
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = {}
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 srv = content[0][1].split()
237 self.__srv[srv[0]] = srv
238
239 def go_import(self, modulepath):
240 if modulepath in self.__srv:
241 srv = self.__srv[modulepath]
242 return GoImport(srv[0], srv[1], srv[2], None)
243 return None
244
245 url = url.geturl() + "?go-get=1"
246 req = urllib.request.Request(url)
247
248 try:
249 body = urllib.request.urlopen(req).read()
250 except HTTPError as http_err:
251 logger.warning(
252 "Unclean status when fetching page from [%s]: %s", url, str(http_err))
253 body = http_err.fp.read()
254 except URLError as url_err:
255 logger.warning(
256 "Failed to fetch page from [%s]: %s", url, str(url_err))
257 return None
258
259 parser = GoImportHTMLParser()
260 parser.feed(body.decode('utf-8'))
261 parser.close()
262
263 return parser.go_import(modulepath)
264
265 def __resolve_from_golang_proxy(self, modulepath, version):
266 """
267 Resolves repository data from golang proxy
268 """
269 url = urllib.parse.urlparse("https://proxy.golang.org/"
270 + modulepath
271 + "/@v/"
272 + version
273 + ".info")
274
275 # Transform url to lower case, golang proxy doesn't like mixed case
276 req = urllib.request.Request(url.geturl().lower())
277
278 try:
279 resp = urllib.request.urlopen(req)
280 except URLError as url_err:
281 logger.warning(
282 "Failed to fetch page from [%s]: %s", url, str(url_err))
283 return None
284
285 golang_proxy_res = resp.read().decode('utf-8')
286 modinfo = json.loads(golang_proxy_res)
287
288 if modinfo and 'Origin' in modinfo:
289 origin = modinfo['Origin']
290 _root_url = urllib.parse.urlparse(origin['URL'])
291
292 # We normalize the repo URL since we don't want the scheme in it
293 _subdir = origin['Subdir'] if 'Subdir' in origin else None
294 _root, _, _ = self.__split_path_version(modulepath)
295 if _subdir:
296 _root = _root[:-len(_subdir)].strip('/')
297
298 _commit = origin['Hash']
299 _vcs = origin['VCS']
300 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
301
302 return None
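 # Illustrative (not part of the original patch): for a hypothetical module
 # "github.com/pkg/errors" at "v0.9.1", the query above fetches
 # https://proxy.golang.org/github.com/pkg/errors/@v/v0.9.1.info and, if the
 # JSON carries an "Origin" object, takes the VCS type, repository URL and
 # commit hash directly from it instead of probing the repository.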
303
304 def __resolve_repository(self, modulepath):
305 """
306 Resolves src uri from go module-path
307 """
308 repodata = self.__resolve_repository_static(modulepath)
309 if not repodata or not repodata.url:
310 repodata = self.__resolve_repository_dynamic(modulepath)
311 if not repodata or not repodata.url:
312 logger.error(
313 "Could not resolve repository for module path '%s'" % modulepath)
314 # There is no way to recover from this
315 sys.exit(14)
316 if repodata:
 317 logger.debug("Resolved download path for import '%s' => %s",
 318 modulepath, repodata.url)
319 return repodata
320
321 def __split_path_version(self, path):
322 i = len(path)
323 dot = False
324 for j in range(i, 0, -1):
325 if path[j - 1] < '0' or path[j - 1] > '9':
326 break
327 if path[j - 1] == '.':
328 dot = True
329 break
330 i = j - 1
331
332 if i <= 1 or i == len(
333 path) or path[i - 1] != 'v' or path[i - 2] != '/':
334 return path, "", True
335
336 prefix, pathMajor = path[:i - 2], path[i - 2:]
337 if dot or len(
338 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
339 return path, "", False
340
341 return prefix, pathMajor, True
342
343 def __get_path_major(self, pathMajor):
344 if not pathMajor:
345 return ""
346
347 if pathMajor[0] != '/' and pathMajor[0] != '.':
348 logger.error(
349 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
350
351 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
 352 pathMajor = pathMajor[:-len("-unstable")]
353
354 return pathMajor[1:]
355
356 def __build_coderepo(self, repo, path):
357 codedir = ""
358 pathprefix, pathMajor, _ = self.__split_path_version(path)
359 if repo.root == path:
360 pathprefix = path
361 elif path.startswith(repo.root):
362 codedir = pathprefix[len(repo.root):].strip('/')
363
364 pseudoMajor = self.__get_path_major(pathMajor)
365
366 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
367 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
368
369 return CodeRepo(path, repo.root, codedir,
370 pathMajor, pathprefix, pseudoMajor)
371
372 def __resolve_version(self, repo, path, version):
373 hash = None
374 coderoot = self.__build_coderepo(repo, path)
375
376 def vcs_fetch_all():
377 tmpdir = tempfile.mkdtemp()
378 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
379 bb.process.run(clone_cmd)
380 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
381 output, _ = bb.process.run(
382 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
383 bb.utils.prunedir(tmpdir)
384 return output.strip().split('\n')
385
386 def vcs_fetch_remote(tag):
387 # add * to grab ^{}
388 refs = {}
389 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
390 repo.url, tag)
391 output, _ = bb.process.run(ls_remote_cmd)
392 output = output.strip().split('\n')
393 for line in output:
394 f = line.split(maxsplit=1)
395 if len(f) != 2:
396 continue
397
398 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
399 if f[1].startswith(prefix):
400 refs[f[1][len(prefix):]] = f[0]
401
402 for key, hash in refs.items():
403 if key.endswith(r"^{}"):
404 refs[key.strip(r"^{}")] = hash
405
406 return refs[tag]
407
408 m_pseudo_semver = re_pseudo_semver.match(version)
409
410 if m_pseudo_semver:
411 remote_refs = vcs_fetch_all()
412 short_commit = m_pseudo_semver.group('commithash')
413 for l in remote_refs:
414 r = l.split(maxsplit=1)
415 sha1 = r[0] if len(r) else None
416 if not sha1:
417 logger.error(
418 "Ups: could not resolve abbref commit for %s" % short_commit)
419
420 elif sha1.startswith(short_commit):
421 hash = sha1
422 break
423 else:
424 m_semver = re_semver.match(version)
425 if m_semver:
426
 427 def get_sha1_remote(pattern):
428 rsha1 = None
429 for line in remote_refs:
430 # Split lines of the following format:
431 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
432 lineparts = line.split(maxsplit=1)
433 sha1 = lineparts[0] if len(lineparts) else None
434 refstring = lineparts[1] if len(
435 lineparts) == 2 else None
436 if refstring:
437 # Normalize tag string and split in case of multiple
438 # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
439 refs = refstring.strip('(), ').split(',')
440 for ref in refs:
 441 if pattern.match(ref.strip()):
442 rsha1 = sha1
443 return rsha1
444
445 semver = "v" + m_semver.group('major') + "."\
446 + m_semver.group('minor') + "."\
447 + m_semver.group('patch') \
448 + (("-" + m_semver.group('prerelease'))
449 if m_semver.group('prerelease') else "")
450
451 tag = os.path.join(
452 coderoot.codeDir, semver) if coderoot.codeDir else semver
453
454 # probe tag using 'ls-remote', which is faster than fetching
455 # complete history
456 hash = vcs_fetch_remote(tag)
457 if not hash:
458 # backup: fetch complete history
459 remote_refs = vcs_fetch_all()
460 hash = get_sha1_remote(
461 re.compile(fr"(tag:|HEAD ->) ({tag})"))
462
463 logger.debug(
464 "Resolving commit for tag '%s' -> '%s'", tag, hash)
465 return hash
466
467 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
468 """Generate SRC_URI functions for go imports"""
469
470 logger.info("Resolving repository for module %s", path)
471 # First try to resolve repo and commit from golang proxy
472 # Most info is already there and we don't have to go through the
473 # repository or even perform the version resolve magic
474 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
475 if golang_proxy_info:
476 repo = golang_proxy_info[0]
477 commit = golang_proxy_info[1]
478 else:
479 # Fallback
 480 # Resolve the repository by hand
481 repo = self.__resolve_repository(path)
482 commit = self.__resolve_version(repo, path, version)
483
484 url = urllib.parse.urlparse(repo.url)
485 repo_url = url.netloc + url.path
486
487 coderoot = self.__build_coderepo(repo, path)
488
489 inline_fcn = "${@go_src_uri("
490 inline_fcn += f"'{repo_url}','{version}'"
491 if repo_url != path:
492 inline_fcn += f",path='{path}'"
493 if coderoot.codeDir:
494 inline_fcn += f",subdir='{coderoot.codeDir}'"
495 if repo.vcs != 'git':
496 inline_fcn += f",vcs='{repo.vcs}'"
497 if replaces:
498 inline_fcn += f",replaces='{replaces}'"
499 if coderoot.pathMajor:
500 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
501 inline_fcn += ")}"
502
503 return inline_fcn, commit
504
505 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
506
507 import re
508 src_uris = []
509 src_revs = []
510
511 def generate_src_rev(path, version, commithash):
512 src_rev = f"# {path}@{version} => {commithash}\n"
 513 # Oops... maybe someone manipulated the source repository and the
 514 # version or commit could not be resolved. This is a sign that either
 515 # a) the supply chain was manipulated (bad), or
 516 # b) the implementation of the version resolving no longer works
 517 # (less bad)
518 if not commithash:
519 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
520 src_rev += f"#!!! Could not resolve version !!!\n"
521 src_rev += f"#!!! Possible supply chain attack !!!\n"
522 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
523 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
524
525 return src_rev
526
 527 # we first go over the replacement list, because we are essentially
528 # interested only in the replaced path
529 if go_mod['Replace']:
530 for replacement in go_mod['Replace']:
531 oldpath = replacement['Old']['Path']
532 path = replacement['New']['Path']
533 version = ''
534 if 'Version' in replacement['New']:
535 version = replacement['New']['Version']
536
537 if os.path.exists(os.path.join(srctree, path)):
538 # the module refers to the local path, remove it from requirement list
539 # because it's a local module
540 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
541 else:
542 # Replace the path and the version, so we don't iterate replacement list anymore
543 for require in go_mod['Require']:
544 if require['Path'] == oldpath:
545 require.update({'Path': path, 'Version': version})
546 break
547
548 for require in go_mod['Require']:
549 path = require['Path']
550 version = require['Version']
551
552 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
553 path, version)
554 src_uris.append(inline_fcn)
555 src_revs.append(generate_src_rev(path, version, commithash))
556
557 # strip version part from module URL /vXX
558 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
559 pn, _ = determine_from_url(baseurl)
560 go_mods_basename = "%s-modules.inc" % pn
561
562 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
563 with open(go_mods_filename, "w") as f:
564 # We introduce this indirection to make the tests a little easier
565 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
566 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
567 for uri in src_uris:
568 f.write(" " + uri + " \\\n")
569 f.write("\"\n\n")
570 for rev in src_revs:
571 f.write(rev + "\n")
572
573 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
574
575 def __go_run_cmd(self, cmd, cwd, d):
576 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
577 shell=True, cwd=cwd)
578
579 def __go_native_version(self, d):
580 stdout, _ = self.__go_run_cmd("go version", None, d)
581 m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
582 major = int(m.group(2))
583 minor = int(m.group(3))
584 patch = int(m.group(4))
585
586 return major, minor, patch
587
588 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
589
590 patchfilename = "go.mod.patch"
591 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
592 d)
593 self.__go_run_cmd("go mod tidy -go=%d.%d" %
594 (go_native_version_major, go_native_version_minor), srctree, d)
595 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
596
597 # Create patch in order to upgrade go version
598 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
599 # Restore original state
600 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
601
602 go_mod = json.loads(stdout)
603 tmpfile = os.path.join(localfilesdir, patchfilename)
604 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
605
606 extravalues['extrafiles'][patchfilename] = tmpfile
607
608 return go_mod, patchfilename
609
610 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
611 # Perform vendoring to retrieve the correct modules.txt
612 tmp_vendor_dir = tempfile.mkdtemp()
613
 614 # -v causes go to print modules.txt to stderr
615 _, stderr = self.__go_run_cmd(
616 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
617
618 modules_txt_basename = "modules.txt"
619 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
620 with open(modules_txt_filename, "w") as f:
621 f.write(stderr)
622
623 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
624
625 licenses = []
626 lic_files_chksum = []
627 licvalues = find_licenses(tmp_vendor_dir, d)
628 shutil.rmtree(tmp_vendor_dir)
629
630 if licvalues:
631 for licvalue in licvalues:
632 license = licvalue[0]
633 lics = tidy_licenses(fixup_license(license))
634 lics = [lic for lic in lics if lic not in licenses]
635 if len(lics):
636 licenses.extend(lics)
637 lic_files_chksum.append(
638 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
639
640 # strip version part from module URL /vXX
641 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
642 pn, _ = determine_from_url(baseurl)
643 licenses_basename = "%s-licenses.inc" % pn
644
645 licenses_filename = os.path.join(localfilesdir, licenses_basename)
646 with open(licenses_filename, "w") as f:
647 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
648 ' & '.join(sorted(licenses, key=str.casefold)))
649 # We introduce this indirection to make the tests a little easier
650 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
651 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
652 for lic in lic_files_chksum:
653 f.write(" " + lic + " \\\n")
654 f.write("\"\n")
655
656 extravalues['extrafiles'][licenses_basename] = licenses_filename
657
658 def process(self, srctree, classes, lines_before,
659 lines_after, handled, extravalues):
660
661 if 'buildsystem' in handled:
662 return False
663
664 files = RecipeHandler.checkfiles(srctree, ['go.mod'])
665 if not files:
666 return False
667
668 d = bb.data.createCopy(tinfoil.config_data)
669 go_bindir = self.__ensure_go()
670 if not go_bindir:
671 sys.exit(14)
672
673 d.prependVar('PATH', '%s:' % go_bindir)
674 handled.append('buildsystem')
675 classes.append("go-vendor")
676
677 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
678
679 go_mod = json.loads(stdout)
680 go_import = go_mod['Module']['Path']
 681 go_version_match = re.match(r"([0-9]+)\.([0-9]+)", go_mod['Go'])
682 go_version_major = int(go_version_match.group(1))
683 go_version_minor = int(go_version_match.group(2))
684 src_uris = []
685
686 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
687 extravalues.setdefault('extrafiles', {})
688
689 # Use an explicit name determined from the module name because it
690 # might differ from the actual URL for replaced modules
691 # strip version part from module URL /vXX
692 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
693 pn, _ = determine_from_url(baseurl)
694
695 # go.mod files with version < 1.17 may not include all indirect
696 # dependencies. Thus, we have to upgrade the go version.
697 if go_version_major == 1 and go_version_minor < 17:
698 logger.warning(
699 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
700 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
701 extravalues, d)
702 src_uris.append(
703 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
704
705 # Check whether the module is vendored. If so, we have nothing to do.
706 # Otherwise we gather all dependencies and add them to the recipe
707 if not os.path.exists(os.path.join(srctree, "vendor")):
708
709 # Write additional $BPN-modules.inc file
710 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
711 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
712 lines_before.append("require %s-licenses.inc" % (pn))
713
714 self.__rewrite_src_uri(lines_before, ["file://modules.txt"])
715
716 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
717 lines_before.append("require %s-modules.inc" % (pn))
718
719 # Do generic license handling
720 handle_license_vars(srctree, lines_before, handled, extravalues, d)
721 self.__rewrite_lic_uri(lines_before)
722
723 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
724 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
725
726 def __update_lines_before(self, updated, newlines, lines_before):
727 if updated:
728 del lines_before[:]
729 for line in newlines:
730 # Hack to avoid newlines that edit_metadata inserts
731 if line.endswith('\n'):
732 line = line[:-1]
733 lines_before.append(line)
734 return updated
735
736 def __rewrite_lic_uri(self, lines_before):
737
738 def varfunc(varname, origvalue, op, newlines):
739 if varname == 'LIC_FILES_CHKSUM':
740 new_licenses = []
741 licenses = origvalue.split('\\')
742 for license in licenses:
743 if not license:
744 logger.warning("No license file was detected for the main module!")
745 # The license list of the main recipe is empty here;
746 # this can happen, for example, in case of a CLOSED license.
747 # Fall through and complete recipe generation.
748 continue
749 license = license.strip()
750 uri, chksum = license.split(';', 1)
751 url = urllib.parse.urlparse(uri)
752 new_uri = os.path.join(
753 url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
754 new_licenses.append(new_uri)
755
756 return new_licenses, None, -1, True
757 return origvalue, None, 0, True
758
759 updated, newlines = bb.utils.edit_metadata(
760 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
761 return self.__update_lines_before(updated, newlines, lines_before)
762
763 def __rewrite_src_uri(self, lines_before, additional_uris = []):
764
765 def varfunc(varname, origvalue, op, newlines):
766 if varname == 'SRC_URI':
767 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
768 src_uri.extend(additional_uris)
769 return src_uri, None, -1, True
770 return origvalue, None, 0, True
771
772 updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
773 return self.__update_lines_before(updated, newlines, lines_before)
774
775
776def register_recipe_handlers(handlers):
777 handlers.append((GoRecipeHandler(), 60))
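
As a side note, __rewrite_src_uri() and __rewrite_lic_uri() above both build on the bb.utils.edit_metadata() callback pattern. A minimal standalone sketch of that pattern, assuming a BitBake environment where bb.utils is importable (the variable content here is illustrative only):

    import bb.utils

    lines = ['SRC_URI = "https://example.invalid/archive.tar.gz"']

    def varfunc(varname, origvalue, op, newlines):
        if varname == 'SRC_URI':
            # Return (new value, new operation, indent, minbreak); -1 keeps
            # the existing indentation and True puts one value per line.
            return ['git://${GO_IMPORT};nobranch=1'], None, -1, True
        return origvalue, None, 0, True

    updated, newlines = bb.utils.edit_metadata(lines, ['SRC_URI'], varfunc)
    print(updated)            # True if anything changed
    print(''.join(newlines))  # edit_metadata() returns lines with trailing
                              # newlines, which is why __update_lines_before()
                              # strips them again
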
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 3394a89970..8c4cdd5234 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -13,10 +13,11 @@ import sys
13import tempfile 13import tempfile
14import bb 14import bb
15from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package
16from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
18from oe.license_finder import match_licenses, find_license_files
17from recipetool.create import RecipeHandler 19from recipetool.create import RecipeHandler
18from recipetool.create import get_license_md5sums 20from recipetool.create import generate_common_licenses_chksums
19from recipetool.create import guess_license
20from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
21logger = logging.getLogger('recipetool') 22logger = logging.getLogger('recipetool')
22 23
@@ -31,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler):
31 """Class to handle the npm recipe creation""" 32 """Class to handle the npm recipe creation"""
32 33
33 @staticmethod 34 @staticmethod
34 def _npm_name(name):
35 """Generate a Yocto friendly npm name"""
36 name = re.sub("/", "-", name)
37 name = name.lower()
38 name = re.sub(r"[^\-a-z0-9]", "", name)
39 name = name.strip("-")
40 return name
41
42 @staticmethod
43 def _get_registry(lines): 35 def _get_registry(lines):
44 """Get the registry value from the 'npm://registry' url""" 36 """Get the registry value from the 'npm://registry' url"""
45 registry = None 37 registry = None
@@ -120,41 +112,71 @@ class NpmRecipeHandler(RecipeHandler):
120 """Return the extra license files and the list of packages""" 112 """Return the extra license files and the list of packages"""
121 licfiles = [] 113 licfiles = []
122 packages = {} 114 packages = {}
115 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
116 # to associate them explicitly with packages for split_pkg_licenses()
117 fallback_licenses = dict()
118
119 def _find_package_licenses(destdir):
120 """Either find license files, or use package.json metadata"""
121 def _get_licenses_from_package_json(package_json):
122 with open(os.path.join(srctree, package_json), "r") as f:
123 data = json.load(f)
124 if "license" in data:
125 licenses = data["license"].split(" ")
126 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
127 return [], licenses
128 else:
129 return [package_json], None
123 130
124 # Handle the parent package
125 packages["${PN}"] = ""
126
127 def _licfiles_append_fallback_readme_files(destdir):
128 """Append README files as fallback to license files if a license files is missing"""
129
130 fallback = True
131 readmes = []
132 basedir = os.path.join(srctree, destdir) 131 basedir = os.path.join(srctree, destdir)
133 for fn in os.listdir(basedir): 132 licfiles = find_license_files(basedir)
134 upper = fn.upper() 133 if len(licfiles) > 0:
135 if upper.startswith("README"): 134 return licfiles, None
136 fullpath = os.path.join(basedir, fn) 135 else:
137 readmes.append(fullpath) 136 # A license wasn't found in the package directory, so we'll use the package.json metadata
138 if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: 137 pkg_json = os.path.join(basedir, "package.json")
139 fallback = False 138 return _get_licenses_from_package_json(pkg_json)
140 if fallback: 139
141 for readme in readmes: 140 def _get_package_licenses(destdir, package):
142 licfiles.append(os.path.relpath(readme, srctree)) 141 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
142 if package_licfiles:
143 licfiles.extend(package_licfiles)
144 else:
145 fallback_licenses[package] = package_licenses
143 146
144 # Handle the dependencies 147 # Handle the dependencies
145 def _handle_dependency(name, params, deptree): 148 def _handle_dependency(name, params, destdir):
146 suffix = "-".join([self._npm_name(dep) for dep in deptree]) 149 deptree = destdir.split('node_modules/')
147 destdirs = [os.path.join("node_modules", dep) for dep in deptree] 150 suffix = "-".join([npm_package(dep) for dep in deptree])
148 destdir = os.path.join(*destdirs) 151 packages["${PN}" + suffix] = destdir
149 packages["${PN}-" + suffix] = destdir 152 _get_package_licenses(destdir, "${PN}" + suffix)
150 _licfiles_append_fallback_readme_files(destdir)
151 153
152 with open(shrinkwrap_file, "r") as f: 154 with open(shrinkwrap_file, "r") as f:
153 shrinkwrap = json.load(f) 155 shrinkwrap = json.load(f)
154
155 foreach_dependencies(shrinkwrap, _handle_dependency, dev) 156 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
156 157
157 return licfiles, packages 158 # Handle the parent package
159 packages["${PN}"] = ""
160 _get_package_licenses(srctree, "${PN}")
161
162 return licfiles, packages, fallback_licenses
163
164 # Handle the peer dependencies
165 def _handle_peer_dependency(self, shrinkwrap_file):
166 """Check if package has peer dependencies and show warning if it is the case"""
167 with open(shrinkwrap_file, "r") as f:
168 shrinkwrap = json.load(f)
169
170 packages = shrinkwrap.get("packages", {})
171 peer_deps = packages.get("", {}).get("peerDependencies", {})
172
173 for peer_dep in peer_deps:
174 peer_dep_yocto_name = npm_package(peer_dep)
175 bb.warn(peer_dep + " is a peer dependency of this package. " +
176 "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool"
177 % peer_dep_yocto_name)
178
179
158 180
159 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 181 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
160 """Handle the npm recipe creation""" 182 """Handle the npm recipe creation"""
@@ -173,7 +195,7 @@ class NpmRecipeHandler(RecipeHandler):
173 if "name" not in data or "version" not in data: 195 if "name" not in data or "version" not in data:
174 return False 196 return False
175 197
176 extravalues["PN"] = self._npm_name(data["name"]) 198 extravalues["PN"] = npm_package(data["name"])
177 extravalues["PV"] = data["version"] 199 extravalues["PV"] = data["version"]
178 200
179 if "description" in data: 201 if "description" in data:
@@ -242,7 +264,7 @@ class NpmRecipeHandler(RecipeHandler):
242 value = origvalue.replace("version=" + data["version"], "version=${PV}") 264 value = origvalue.replace("version=" + data["version"], "version=${PV}")
243 value = value.replace("version=latest", "version=${PV}") 265 value = value.replace("version=latest", "version=${PV}")
244 values = [line.strip() for line in value.strip('\n').splitlines()] 266 values = [line.strip() for line in value.strip('\n').splitlines()]
245 if "dependencies" in shrinkwrap: 267 if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
246 values.append(url_recipe) 268 values.append(url_recipe)
247 return values, None, 4, False 269 return values, None, 4, False
248 270
@@ -258,40 +280,19 @@ class NpmRecipeHandler(RecipeHandler):
258 fetcher.unpack(srctree) 280 fetcher.unpack(srctree)
259 281
260 bb.note("Handling licences ...") 282 bb.note("Handling licences ...")
261 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 283 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
262 284 licvalues = match_licenses(licfiles, srctree, d)
263 def _guess_odd_license(licfiles): 285 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
264 import bb 286 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
265 287 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
266 md5sums = get_license_md5sums(d, linenumbers=True) 288 extravalues["LICENSE"] = fallback_licenses_flat
267
268 chksums = []
269 licenses = []
270 for licfile in licfiles:
271 f = os.path.join(srctree, licfile)
272 md5value = bb.utils.md5_file(f)
273 (license, beginline, endline, md5) = md5sums.get(md5value,
274 (None, "", "", ""))
275 if not license:
276 license = "Unknown"
277 logger.info("Please add the following line for '%s' to a "
278 "'lib/recipetool/licenses.csv' and replace `Unknown`, "
279 "`X`, `Y` and `MD5` with the license, begin line, "
280 "end line and partial MD5 checksum:\n" \
281 "%s,Unknown,X,Y,MD5" % (licfile, md5value))
282 chksums.append("file://%s%s%s;md5=%s" % (licfile,
283 ";beginline=%s" % (beginline) if beginline else "",
284 ";endline=%s" % (endline) if endline else "",
285 md5 if md5 else md5value))
286 licenses.append((license, licfile, md5value))
287 return (licenses, chksums)
288
289 (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
290 split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
291 289
292 classes.append("npm") 290 classes.append("npm")
293 handled.append("buildsystem") 291 handled.append("buildsystem")
294 292
293 # Check if package has peer dependencies and inform the user
294 self._handle_peer_dependency(shrinkwrap_file)
295
295 return True 296 return True
296 297
297def register_recipe_handlers(handlers): 298def register_recipe_handlers(handlers):
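
The removed _npm_name() helper is superseded by npm_package() from bb.fetch2.npm. A standalone sketch of the same normalisation, reconstructed from the deleted helper above (the bitbake implementation may differ in detail):

    import re

    def npm_name(name):
        """Generate a Yocto-friendly package name from an npm package name."""
        name = name.replace("/", "-").lower()
        name = re.sub(r"[^\-a-z0-9]", "", name)
        return name.strip("-")

    print(npm_name("@types/Node"))  # prints: types-node
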
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
deleted file mode 100644
index 0d3fb0607b..0000000000
--- a/scripts/lib/recipetool/licenses.csv
+++ /dev/null
@@ -1,37 +0,0 @@
10636e73ff0215e8d672dc4c32c317bb3,GPLv2
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPLv2
318810669f13b87348459e611d31ab760,GPLv2
4252890d9eee26aab7b432e8b8a616475,LGPLv2
52d5025d4aa3495befef8f17206a5b0a1,LGPLv2.1
63214f080875748938ba060314b4f727d,LGPLv2
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0
8393a5ca445f6965873eca0259a17f833,GPLv2
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPLv2
114325afd396febcb659c36b49533135d4,GPLv2
124fbd65380cdd255951079008b364516c,LGPLv2.1
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPLv2
1559530bdf33659b29e73d4adb9f9f6552,GPLv2
165f30f0716dfdd0d91eb439ebec522ec2,LGPLv2
176a6a8e020838b23406c81b19c1d46df6,LGPLv3
18751419260aa954499f7abaabaa882bbe,GPLv2
197fbc338309ac38fefcd64b04bb903e34,LGPLv2.1
208ca43cbc842c2336e835926c2166c28b,GPLv2
2194d55d512a9ba36caa9b7df079bae19f,GPLv2
229ac2e7cff1ddaf48b6eab6028f23ef88,GPLv2
239f604d8a4f8e74f4f5140845a21b6674,LGPLv2
24a6f89e2100d9b6cdffcea4f398e37343,LGPLv2.1
25b234ee4d69f5fce4486a80fdaf4a4263,GPLv2
26bbb461211a33b134d42ed5ee802b37ff,LGPLv2.1
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1
28c93c0550bd3173f4504b2cbd8991e50b,GPLv2
29d32239bcb673463ab874e80d47fae504,GPLv3
30d7810fab7487fb0aad327b76f1be7cd7,GPLv2
31d8045f3b8f929c1cb29a1e3fd737b499,LGPLv2.1
32db979804f025cf55aabec7129cb671ed,LGPLv2
33eb723b61539feef013de476e68b5c50a,GPLv2
34ebb5c50ab7cab4baeffba14977030c07,GPLv2
35f27defe1e96c2e1ecd4e0c9be8967949,GPLv3
36fad9b3332be894bab9bc501572864b29,LGPLv2.1
37fbc093901857fcd118f065f900982c24,LGPLv2.1
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
index f8e2ee75fb..b5ad335cae 100644
--- a/scripts/lib/recipetool/setvar.py
+++ b/scripts/lib/recipetool/setvar.py
@@ -49,6 +49,7 @@ def setvar(args):
49 for patch in patches: 49 for patch in patches:
50 for line in patch: 50 for line in patch:
51 sys.stdout.write(line) 51 sys.stdout.write(line)
52 tinfoil.modified_files()
52 return 0 53 return 0
53 54
54 55
diff --git a/scripts/lib/resulttool/junit.py b/scripts/lib/resulttool/junit.py
new file mode 100644
index 0000000000..c7a53dc550
--- /dev/null
+++ b/scripts/lib/resulttool/junit.py
@@ -0,0 +1,77 @@
1# resulttool - report test results in JUnit XML format
2#
3# Copyright (c) 2024, Siemens AG.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import xml.etree.ElementTree as ET
11import resulttool.resultutils as resultutils
12
13def junit(args, logger):
14 testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map)
15
16 total_time = 0
17 skipped = 0
18 failures = 0
19 errors = 0
20
21 for tests in testresults.values():
22 results = tests[next(reversed(tests))].get("result", {})
23
24 for result_id, result in results.items():
25 # Filter out ptestresult.rawlogs and ptestresult.sections; count only actual test cases
26 if re.search(r'\.test_', result_id):
27 total_time += result.get("duration", 0)
28
29 if result['status'] == "FAILED":
30 failures += 1
31 elif result['status'] == "ERROR":
32 errors += 1
33 elif result['status'] == "SKIPPED":
34 skipped += 1
35
36 testsuites_node = ET.Element("testsuites")
37 testsuites_node.set("time", "%s" % total_time)
38 testsuite_node = ET.SubElement(testsuites_node, "testsuite")
39 testsuite_node.set("name", "Testimage")
40 testsuite_node.set("time", "%s" % total_time)
41 testsuite_node.set("tests", "%s" % len(results))
42 testsuite_node.set("failures", "%s" % failures)
43 testsuite_node.set("errors", "%s" % errors)
44 testsuite_node.set("skipped", "%s" % skipped)
45
46 for result_id, result in results.items():
47 if re.search(r'\.test_', result_id):
48 testcase_node = ET.SubElement(testsuite_node, "testcase", {
49 "name": result_id,
50 "classname": "Testimage",
51 "time": str(result['duration'])
52 })
53 if result['status'] == "SKIPPED":
54 ET.SubElement(testcase_node, "skipped", message=result['log'])
55 elif result['status'] == "FAILED":
56 ET.SubElement(testcase_node, "failure", message=result['log'])
57 elif result['status'] == "ERROR":
58 ET.SubElement(testcase_node, "error", message=result['log'])
59
60 tree = ET.ElementTree(testsuites_node)
61
62 if args.junit_xml_path is None:
63 args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml'
64 tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True)
65
66 logger.info('Saved JUnit XML report as %s' % args.junit_xml_path)
67
68def register_commands(subparsers):
69 """Register subcommands from this plugin"""
70 parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format',
71 description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.',
72 group='analysis')
73 parser_build.set_defaults(func=junit)
74 parser_build.add_argument('json_file',
75 help='path to the testresults.json file')
76 parser_build.add_argument('-j', '--junit_xml_path',
77 help='path of the generated JUnit XML report. The default location is <build_dir>/tmp/log/oeqa/junit.xml')
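
A quick sanity check of the generated report needs nothing beyond the standard library; a sketch (the file path is illustrative, use whatever --junit_xml_path produced):

    import xml.etree.ElementTree as ET

    tree = ET.parse("junit.xml")
    suite = tree.getroot().find("testsuite")
    print("%s tests, %s failures, %s errors, %s skipped" % (
        suite.get("tests"), suite.get("failures"),
        suite.get("errors"), suite.get("skipped")))
    for case in suite.iter("testcase"):
        if case.find("failure") is not None:
            print("FAILED:", case.get("name"))
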
diff --git a/scripts/lib/resulttool/log.py b/scripts/lib/resulttool/log.py
index eb3927ec82..15148ca288 100644
--- a/scripts/lib/resulttool/log.py
+++ b/scripts/lib/resulttool/log.py
@@ -28,12 +28,10 @@ def show_reproducible(result, reproducible, logger):
28def log(args, logger): 28def log(args, logger):
29 results = resultutils.load_resultsdata(args.source) 29 results = resultutils.load_resultsdata(args.source)
30 30
31 ptest_count = sum(1 for _, _, _, r in resultutils.test_run_results(results) if 'ptestresult.sections' in r)
32 if ptest_count > 1 and not args.prepend_run:
33 print("%i ptest sections found. '--prepend-run' is required" % ptest_count)
34 return 1
35
36 for _, run_name, _, r in resultutils.test_run_results(results): 31 for _, run_name, _, r in resultutils.test_run_results(results):
32 if args.list_ptest:
33 print('\n'.join(sorted(r['ptestresult.sections'].keys())))
34
37 if args.dump_ptest: 35 if args.dump_ptest:
38 for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']: 36 for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']:
39 if sectname in r: 37 if sectname in r:
@@ -48,6 +46,9 @@ def log(args, logger):
48 46
49 os.makedirs(dest_dir, exist_ok=True) 47 os.makedirs(dest_dir, exist_ok=True)
50 dest = os.path.join(dest_dir, '%s.log' % name) 48 dest = os.path.join(dest_dir, '%s.log' % name)
49 if os.path.exists(dest):
50 print("Overlapping ptest logs found, skipping %s. The '--prepend-run' option would avoid this" % name)
51 continue
51 print(dest) 52 print(dest)
52 with open(dest, 'w') as f: 53 with open(dest, 'w') as f:
53 f.write(logdata) 54 f.write(logdata)
@@ -86,6 +87,8 @@ def register_commands(subparsers):
86 parser.set_defaults(func=log) 87 parser.set_defaults(func=log)
87 parser.add_argument('source', 88 parser.add_argument('source',
88 help='the results file/directory/URL to import') 89 help='the results file/directory/URL to import')
90 parser.add_argument('--list-ptest', action='store_true',
91 help='list the ptest test names')
89 parser.add_argument('--ptest', action='append', default=[], 92 parser.add_argument('--ptest', action='append', default=[],
90 help='show logs for a ptest') 93 help='show logs for a ptest')
91 parser.add_argument('--dump-ptest', metavar='DIR', 94 parser.add_argument('--dump-ptest', metavar='DIR',
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py
index ecb27c5933..ae0861ac6b 100755
--- a/scripts/lib/resulttool/manualexecution.py
+++ b/scripts/lib/resulttool/manualexecution.py
@@ -22,7 +22,7 @@ def load_json_file(f):
22def write_json_file(f, json_data): 22def write_json_file(f, json_data):
23 os.makedirs(os.path.dirname(f), exist_ok=True) 23 os.makedirs(os.path.dirname(f), exist_ok=True)
24 with open(f, 'w') as filedata: 24 with open(f, 'w') as filedata:
25 filedata.write(json.dumps(json_data, sort_keys=True, indent=4)) 25 filedata.write(json.dumps(json_data, sort_keys=True, indent=1))
26 26
27class ManualTestRunner(object): 27class ManualTestRunner(object):
28 28
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py
index 9f952951b3..33b3119c54 100644
--- a/scripts/lib/resulttool/regression.py
+++ b/scripts/lib/resulttool/regression.py
@@ -7,17 +7,213 @@
7# 7#
8 8
9import resulttool.resultutils as resultutils 9import resulttool.resultutils as resultutils
10import json
11 10
12from oeqa.utils.git import GitRepo 11from oeqa.utils.git import GitRepo
13import oeqa.utils.gitarchive as gitarchive 12import oeqa.utils.gitarchive as gitarchive
14 13
15def compare_result(logger, base_name, target_name, base_result, target_result): 14METADATA_MATCH_TABLE = {
15 "oeselftest": "OESELFTEST_METADATA"
16}
17
18OESELFTEST_METADATA_GUESS_TABLE = {
19 "trigger-build-posttrigger": {
20 "run_all_tests": False,
21 "run_tests": ["buildoptions.SourceMirroring.test_yocto_source_mirror"],
22 "skips": None,
23 "machine": None,
24 "select_tags": None,
25 "exclude_tags": None
26 },
27 "reproducible": {
28 "run_all_tests": False,
29 "run_tests": ["reproducible"],
30 "skips": None,
31 "machine": None,
32 "select_tags": None,
33 "exclude_tags": None
34 },
35 "arch-qemu-quick": {
36 "run_all_tests": True,
37 "run_tests": None,
38 "skips": None,
39 "machine": None,
40 "select_tags": ["machine"],
41 "exclude_tags": None
42 },
43 "arch-qemu-full-x86-or-x86_64": {
44 "run_all_tests": True,
45 "run_tests": None,
46 "skips": None,
47 "machine": None,
48 "select_tags": ["machine", "toolchain-system"],
49 "exclude_tags": None
50 },
51 "arch-qemu-full-others": {
52 "run_all_tests": True,
53 "run_tests": None,
54 "skips": None,
55 "machine": None,
56 "select_tags": ["machine", "toolchain-user"],
57 "exclude_tags": None
58 },
59 "selftest": {
60 "run_all_tests": True,
61 "run_tests": None,
62 "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"],
63 "machine": None,
64 "select_tags": None,
65 "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
66 },
67 "bringup": {
68 "run_all_tests": True,
69 "run_tests": None,
70 "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"],
71 "machine": None,
72 "select_tags": None,
73 "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
74 },
75}
76
77STATUS_STRINGS = {
78 "None": "No matching test result"
79}
80
81REGRESSIONS_DISPLAY_LIMIT = 50
82
83MISSING_TESTS_BANNER = "-------------------------- Missing tests --------------------------"
84ADDITIONAL_DATA_BANNER = "--------------------- Matches and improvements --------------------"
85
86def test_has_at_least_one_matching_tag(test, tag_list):
87 return "oetags" in test and any(oetag in tag_list for oetag in test["oetags"])
88
89def all_tests_have_at_least_one_matching_tag(results, tag_list):
90 return all(test_has_at_least_one_matching_tag(test_result, tag_list) or test_name.startswith("ptestresult") for (test_name, test_result) in results.items())
91
92def any_test_have_any_matching_tag(results, tag_list):
93 return any(test_has_at_least_one_matching_tag(test, tag_list) for test in results.values())
94
95def have_skipped_test(result, test_prefix):
96 return all( result[test]['status'] == "SKIPPED" for test in result if test.startswith(test_prefix))
97
98def have_all_tests_skipped(result, test_prefixes_list):
99 return all(have_skipped_test(result, test_prefix) for test_prefix in test_prefixes_list)
100
101def guess_oeselftest_metadata(results):
102 """
103 When an oeselftest test result lacks OESELFTEST_METADATA, we can try to guess it from the results content.
104 Check the results for specific values (absence/presence of oetags, number and names of executed tests...),
105 and if they match one of the known autobuilder configurations, apply the guessed OESELFTEST_METADATA
106 to allow proper test filtering.
107 This guessing process is tightly coupled to config.json in the autobuilder. It should trigger less and less
108 over time, as new tests will have OESELFTEST_METADATA properly appended at test reporting time.
109 """
110
111 if len(results) == 1 and "buildoptions.SourceMirroring.test_yocto_source_mirror" in results:
112 return OESELFTEST_METADATA_GUESS_TABLE['trigger-build-posttrigger']
113 elif all(result.startswith("reproducible") for result in results):
114 return OESELFTEST_METADATA_GUESS_TABLE['reproducible']
115 elif all_tests_have_at_least_one_matching_tag(results, ["machine"]):
116 return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-quick']
117 elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-system"]):
118 return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-x86-or-x86_64']
119 elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-user"]):
120 return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-others']
121 elif not any_test_have_any_matching_tag(results, ["machine", "toolchain-user", "toolchain-system"]):
122 if have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"]):
123 return OESELFTEST_METADATA_GUESS_TABLE['selftest']
124 elif have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"]):
125 return OESELFTEST_METADATA_GUESS_TABLE['bringup']
126
127 return None
128
129
130def metadata_matches(base_configuration, target_configuration):
131 """
132 For the passed base and target, check the test type. If the test type matches one of
133 the properties described in METADATA_MATCH_TABLE, compare the metadata if it is
134 present in the base. Return True if the metadata matches, or if the base lacks some
135 data (either TEST_TYPE or the corresponding metadata).
136 """
137 test_type = base_configuration.get('TEST_TYPE')
138 if test_type not in METADATA_MATCH_TABLE:
139 return True
140
141 metadata_key = METADATA_MATCH_TABLE.get(test_type)
142 if target_configuration.get(metadata_key) != base_configuration.get(metadata_key):
143 return False
144
145 return True
146
147
148def machine_matches(base_configuration, target_configuration):
149 return base_configuration.get('MACHINE') == target_configuration.get('MACHINE')
150
151
152def can_be_compared(logger, base, target):
153 """
154 Some tests are not relevant to compare, for example oeselftest
155 runs with different test sets or parameters. Return True if the tests can be
156 compared.
157 """
158 ret = True
159 base_configuration = base['configuration']
160 target_configuration = target['configuration']
161
162 # Older test results lack proper OESELFTEST_METADATA: if not present, try to guess it based on the test results.
163 if base_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in base_configuration:
164 guess = guess_oeselftest_metadata(base['result'])
165 if guess is None:
166 logger.error(f"ERROR: did not manage to guess oeselftest metadata for {base_configuration['STARTTIME']}")
167 else:
168 logger.debug(f"Enriching {base_configuration['STARTTIME']} with {guess}")
169 base_configuration['OESELFTEST_METADATA'] = guess
170 if target_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in target_configuration:
171 guess = guess_oeselftest_metadata(target['result'])
172 if guess is None:
173 logger.error(f"ERROR: did not manage to guess oeselftest metadata for {target_configuration['STARTTIME']}")
174 else:
175 logger.debug(f"Enriching {target_configuration['STARTTIME']} with {guess}")
176 target_configuration['OESELFTEST_METADATA'] = guess
177
178 # Test runs with LTP results in them should only be compared with other runs that also contain LTP tests
179 if base_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in base['result']):
180 ret = target_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in target['result'])
181
182 return ret and metadata_matches(base_configuration, target_configuration) \
183 and machine_matches(base_configuration, target_configuration)
184
185def get_status_str(raw_status):
186 raw_status_lower = raw_status.lower() if raw_status else "None"
187 return STATUS_STRINGS.get(raw_status_lower, raw_status)
188
189def get_additional_info_line(new_pass_count, new_tests):
190 result = []
191 if new_tests:
192 result.append(f'+{new_tests} test(s) present')
193 if new_pass_count:
194 result.append(f'+{new_pass_count} test(s) now passing')
195
196 if not result:
197 return ""
198
199 return ' -> ' + ', '.join(result) + '\n'
200
201def compare_result(logger, base_name, target_name, base_result, target_result, display_limit=None):
16 base_result = base_result.get('result') 202 base_result = base_result.get('result')
17 target_result = target_result.get('result') 203 target_result = target_result.get('result')
18 result = {} 204 result = {}
205 new_tests = 0
206 regressions = {}
207 resultstring = ""
209 new_pass_count = 0
210
211 display_limit = int(display_limit) if display_limit else REGRESSIONS_DISPLAY_LIMIT
212
19 if base_result and target_result: 213 if base_result and target_result:
20 for k in base_result: 214 for k in base_result:
215 if k in ['ptestresult.rawlogs', 'ptestresult.sections']:
216 continue
21 base_testcase = base_result[k] 217 base_testcase = base_result[k]
22 base_status = base_testcase.get('status') 218 base_status = base_testcase.get('status')
23 if base_status: 219 if base_status:
@@ -27,12 +223,47 @@ def compare_result(logger, base_name, target_name, base_result, target_result):
27 result[k] = {'base': base_status, 'target': target_status} 223 result[k] = {'base': base_status, 'target': target_status}
28 else: 224 else:
29 logger.error('Failed to retrieved base test case status: %s' % k) 225 logger.error('Failed to retrieved base test case status: %s' % k)
226
227 # Also count new tests that were not present in the base results: these
228 # could be newly added tests, but they could also highlight test
229 # renames or fixed faulty ptests
230 for k in target_result:
231 if k not in base_result:
232 new_tests += 1
30 if result: 233 if result:
31 resultstring = "Regression: %s\n %s\n" % (base_name, target_name) 234 new_pass_count = sum(test['target'] is not None and test['target'].startswith("PASS") for test in result.values())
32 for k in sorted(result): 235 # Print a regression report only if at least one test has a regression status (FAIL, SKIPPED, absent...)
33 resultstring += ' %s: %s -> %s\n' % (k, result[k]['base'], result[k]['target']) 236 if new_pass_count < len(result):
237 resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
238 for k in sorted(result):
239 if not result[k]['target'] or not result[k]['target'].startswith("PASS"):
240 # Differentiate each ptest kind when listing regressions
241 key_parts = k.split('.')
242 key = '.'.join(key_parts[:2]) if k.startswith('ptest') else key_parts[0]
243 # Append new regression to corresponding test family
244 regressions[key] = regressions.setdefault(key, []) + [' %s: %s -> %s\n' % (k, get_status_str(result[k]['base']), get_status_str(result[k]['target']))]
245 resultstring += f" Total: {sum([len(regressions[r]) for r in regressions])} new regression(s):\n"
246 for k in regressions:
247 resultstring += f" {len(regressions[k])} regression(s) for {k}\n"
248 count_to_print = min(display_limit, len(regressions[k])) if display_limit > 0 else len(regressions[k])
249 resultstring += ''.join(regressions[k][:count_to_print])
250 if count_to_print < len(regressions[k]):
251 resultstring += ' [...]\n'
252 if new_pass_count > 0:
253 resultstring += f' Additionally, {new_pass_count} previously failing test(s) is/are now passing\n'
254 if new_tests > 0:
255 resultstring += f' Additionally, {new_tests} new test(s) is/are present\n'
256 else:
257 resultstring = "%s\n%s\n" % (base_name, target_name)
258 result = None
34 else: 259 else:
35 resultstring = "Match: %s\n %s" % (base_name, target_name) 260 resultstring = "%s\n%s\n" % (base_name, target_name)
261
262 if not result:
263 additional_info = get_additional_info_line(new_pass_count, new_tests)
264 if additional_info:
265 resultstring += additional_info
266
36 return result, resultstring 267 return result, resultstring
37 268
38def get_results(logger, source): 269def get_results(logger, source):
@@ -44,12 +275,38 @@ def regression(args, logger):
44 275
45 regression_common(args, logger, base_results, target_results) 276 regression_common(args, logger, base_results, target_results)
46 277
278# Some test case naming is poor and contains random strings, particularly lttng/babeltrace.
279# Truncating the test names works since they contain file and line number identifiers
280# which allow us to match them without the random components.
281def fixup_ptest_names(results, logger):
282 for r in results:
283 for i in results[r]:
284 tests = list(results[r][i]['result'].keys())
285 for test in tests:
286 new = None
287 if test.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in test:
288 new = test.split("_-_")[0]
289 elif test.startswith(("ptestresult.curl.")) and "__" in test:
290 new = test.split("__")[0]
291 elif test.startswith(("ptestresult.dbus.")) and "__" in test:
292 new = test.split("__")[0]
293 elif test.startswith("ptestresult.binutils") and "build-st-" in test:
294 new = test.split(" ")[0]
295 elif test.startswith("ptestresult.gcc") and "/tmp/runtest." in test:
296 new = ".".join(test.split(".")[:2])
297 if new:
298 results[r][i]['result'][new] = results[r][i]['result'][test]
299 del results[r][i]['result'][test]
300
47def regression_common(args, logger, base_results, target_results): 301def regression_common(args, logger, base_results, target_results):
48 if args.base_result_id: 302 if args.base_result_id:
49 base_results = resultutils.filter_resultsdata(base_results, args.base_result_id) 303 base_results = resultutils.filter_resultsdata(base_results, args.base_result_id)
50 if args.target_result_id: 304 if args.target_result_id:
51 target_results = resultutils.filter_resultsdata(target_results, args.target_result_id) 305 target_results = resultutils.filter_resultsdata(target_results, args.target_result_id)
52 306
307 fixup_ptest_names(base_results, logger)
308 fixup_ptest_names(target_results, logger)
309
53 matches = [] 310 matches = []
54 regressions = [] 311 regressions = []
55 notfound = [] 312 notfound = []
@@ -62,7 +319,9 @@ def regression_common(args, logger, base_results, target_results):
62 # removing any pairs which match 319 # removing any pairs which match
63 for c in base.copy(): 320 for c in base.copy():
64 for b in target.copy(): 321 for b in target.copy():
65 res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b]) 322 if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
323 continue
324 res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
66 if not res: 325 if not res:
67 matches.append(resstr) 326 matches.append(resstr)
68 base.remove(c) 327 base.remove(c)
@@ -71,15 +330,18 @@ def regression_common(args, logger, base_results, target_results):
71 # Should only now see regressions, we may not be able to match multiple pairs directly 330 # Should only now see regressions, we may not be able to match multiple pairs directly
72 for c in base: 331 for c in base:
73 for b in target: 332 for b in target:
74 res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b]) 333 if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
334 continue
335 res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
75 if res: 336 if res:
76 regressions.append(resstr) 337 regressions.append(resstr)
77 else: 338 else:
78 notfound.append("%s not found in target" % a) 339 notfound.append("%s not found in target" % a)
79 print("\n".join(sorted(matches)))
80 print("\n".join(sorted(regressions))) 340 print("\n".join(sorted(regressions)))
341 print("\n" + MISSING_TESTS_BANNER + "\n")
81 print("\n".join(sorted(notfound))) 342 print("\n".join(sorted(notfound)))
82 343 print("\n" + ADDITIONAL_DATA_BANNER + "\n")
344 print("\n".join(sorted(matches)))
83 return 0 345 return 0
84 346
85def regression_git(args, logger): 347def regression_git(args, logger):
@@ -162,6 +424,7 @@ def register_commands(subparsers):
162 help='(optional) filter the base results to this result ID') 424 help='(optional) filter the base results to this result ID')
163 parser_build.add_argument('-t', '--target-result-id', default='', 425 parser_build.add_argument('-t', '--target-result-id', default='',
164 help='(optional) filter the target results to this result ID') 426 help='(optional) filter the target results to this result ID')
427 parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
165 428
166 parser_build = subparsers.add_parser('regression-git', help='regression git analysis', 429 parser_build = subparsers.add_parser('regression-git', help='regression git analysis',
167 description='regression analysis comparing base result set to target ' 430 description='regression analysis comparing base result set to target '
@@ -183,4 +446,5 @@ def register_commands(subparsers):
183 parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified") 446 parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified")
184 parser_build.add_argument('--commit2', help="Revision to compare with") 447 parser_build.add_argument('--commit2', help="Revision to compare with")
185 parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified") 448 parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified")
449 parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
186 450
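
A standalone illustration of the truncation rules in fixup_ptest_names(); the sample test ids below are invented, but follow the shapes the function matches:

    tests = [
        "ptestresult.lttng-tools.tools/foo_-_a1b2c3",    # random suffix after _-_
        "ptestresult.curl.test_0001__42",                # random suffix after __
        "ptestresult.gcc.c-common/tmp/runtest.XYZ",      # random /tmp/runtest. path
    ]
    for test in tests:
        if "_-_" in test:
            print(test.split("_-_")[0])
        elif "__" in test:
            print(test.split("__")[0])
        elif "/tmp/runtest." in test:
            print(".".join(test.split(".")[:2]))
    # -> ptestresult.lttng-tools.tools/foo
    # -> ptestresult.curl.test_0001
    # -> ptestresult.gcc
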
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index f0ca50ebe2..1c100b00ab 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -176,7 +176,10 @@ class ResultsTextReport(object):
176 vals['sort'] = line['testseries'] + "_" + line['result_id'] 176 vals['sort'] = line['testseries'] + "_" + line['result_id']
177 vals['failed_testcases'] = line['failed_testcases'] 177 vals['failed_testcases'] = line['failed_testcases']
178 for k in cols: 178 for k in cols:
179 vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f')) 179 if total_tested:
180 vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
181 else:
182 vals[k] = "0 (0%)"
180 for k in maxlen: 183 for k in maxlen:
181 if k in vals and len(vals[k]) > maxlen[k]: 184 if k in vals and len(vals[k]) > maxlen[k]:
182 maxlen[k] = len(vals[k]) 185 maxlen[k] = len(vals[k])
@@ -253,7 +256,7 @@ class ResultsTextReport(object):
253 if selected_test_case_only: 256 if selected_test_case_only:
254 print_selected_testcase_result(raw_results, selected_test_case_only) 257 print_selected_testcase_result(raw_results, selected_test_case_only)
255 else: 258 else:
256 print(json.dumps(raw_results, sort_keys=True, indent=4)) 259 print(json.dumps(raw_results, sort_keys=True, indent=1))
257 else: 260 else:
258 print('Could not find raw test result for %s' % raw_test) 261 print('Could not find raw test result for %s' % raw_test)
259 return 0 262 return 0
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
index 8917022d36..b8fc79a6ac 100644
--- a/scripts/lib/resulttool/resultutils.py
+++ b/scripts/lib/resulttool/resultutils.py
@@ -14,8 +14,11 @@ import scriptpath
14import copy 14import copy
15import urllib.request 15import urllib.request
16import posixpath 16import posixpath
17import logging
17scriptpath.add_oe_lib_path() 18scriptpath.add_oe_lib_path()
18 19
20logger = logging.getLogger('resulttool')
21
19flatten_map = { 22flatten_map = {
20 "oeselftest": [], 23 "oeselftest": [],
21 "runtime": [], 24 "runtime": [],
@@ -31,13 +34,19 @@ regression_map = {
31 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE'] 34 "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
32} 35}
33store_map = { 36store_map = {
34 "oeselftest": ['TEST_TYPE'], 37 "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'],
35 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'], 38 "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
36 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], 39 "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
37 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'], 40 "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
38 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME'] 41 "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
39} 42}
40 43
44rawlog_sections = {
45 "ptestresult.rawlogs": "ptest",
46 "ltpresult.rawlogs": "ltp",
47 "ltpposixresult.rawlogs": "ltpposix"
48}
49
41def is_url(p): 50def is_url(p):
42 """ 51 """
43 Helper for determining if the given path is a URL 52 Helper for determining if the given path is a URL
@@ -58,7 +67,11 @@ def append_resultsdata(results, f, configmap=store_map, configvars=extra_configv
58 testseries = posixpath.basename(posixpath.dirname(url.path)) 67 testseries = posixpath.basename(posixpath.dirname(url.path))
59 else: 68 else:
60 with open(f, "r") as filedata: 69 with open(f, "r") as filedata:
61 data = json.load(filedata) 70 try:
71 data = json.load(filedata)
72 except json.decoder.JSONDecodeError:
73 print("Cannot decode {}. Possible corruption. Skipping.".format(f))
74 data = ""
62 testseries = os.path.basename(os.path.dirname(f)) 75 testseries = os.path.basename(os.path.dirname(f))
63 else: 76 else:
64 data = f 77 data = f
@@ -104,21 +117,57 @@ def filter_resultsdata(results, resultid):
104 newresults[r][i] = results[r][i] 117 newresults[r][i] = results[r][i]
105 return newresults 118 return newresults
106 119
107def strip_ptestresults(results): 120def strip_logs(results):
108 newresults = copy.deepcopy(results) 121 newresults = copy.deepcopy(results)
109 #for a in newresults2:
110 # newresults = newresults2[a]
111 for res in newresults: 122 for res in newresults:
112 if 'result' not in newresults[res]: 123 if 'result' not in newresults[res]:
113 continue 124 continue
114 if 'ptestresult.rawlogs' in newresults[res]['result']: 125 for logtype in rawlog_sections:
115 del newresults[res]['result']['ptestresult.rawlogs'] 126 if logtype in newresults[res]['result']:
127 del newresults[res]['result'][logtype]
116 if 'ptestresult.sections' in newresults[res]['result']: 128 if 'ptestresult.sections' in newresults[res]['result']:
117 for i in newresults[res]['result']['ptestresult.sections']: 129 for i in newresults[res]['result']['ptestresult.sections']:
118 if 'log' in newresults[res]['result']['ptestresult.sections'][i]: 130 if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
119 del newresults[res]['result']['ptestresult.sections'][i]['log'] 131 del newresults[res]['result']['ptestresult.sections'][i]['log']
120 return newresults 132 return newresults
121 133
134# For timing numbers, crazy amounts of precision don't make sense and just confuse
135# the logs. For numbers over 1, trim to 3 decimal places; for numbers less than 1,
136# trim to 4 significant digits.
137def trim_durations(results):
138 for res in results:
139 if 'result' not in results[res]:
140 continue
141 for entry in results[res]['result']:
142 if 'duration' in results[res]['result'][entry]:
143 duration = results[res]['result'][entry]['duration']
144 if duration > 1:
145 results[res]['result'][entry]['duration'] = float("%.3f" % duration)
146 elif duration < 1:
147 results[res]['result'][entry]['duration'] = float("%.4g" % duration)
148 return results
149
150def handle_cleanups(results):
151 # Remove pointless path duplication from old format reproducibility results
152 for res2 in results:
153 try:
154 section = results[res2]['result']['reproducible']['files']
155 for pkgtype in section:
156 for filelist in section[pkgtype].copy():
157 if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict:
158 newlist = []
159 for entry in section[pkgtype][filelist]:
160 newlist.append(entry["reference"].split("/./")[1])
161 section[pkgtype][filelist] = newlist
162
163 except KeyError:
164 pass
165 # Remove pointless duplicate rawlogs data
166 try:
167 del results[res2]['result']['reproducible.rawlogs']
168 except KeyError:
169 pass
170
122def decode_log(logdata): 171def decode_log(logdata):
123 if isinstance(logdata, str): 172 if isinstance(logdata, str):
124 return logdata 173 return logdata
@@ -142,7 +191,7 @@ def generic_get_log(sectionname, results, section):
142 return decode_log(ptest['log']) 191 return decode_log(ptest['log'])
143 192
144def ptestresult_get_log(results, section): 193def ptestresult_get_log(results, section):
145 return generic_get_log('ptestresuls.sections', results, section) 194 return generic_get_log('ptestresult.sections', results, section)
146 195
147def generic_get_rawlogs(sectname, results): 196def generic_get_rawlogs(sectname, results):
148 if sectname not in results: 197 if sectname not in results:
@@ -151,9 +200,6 @@ def generic_get_rawlogs(sectname, results):
151 return None 200 return None
152 return decode_log(results[sectname]['log']) 201 return decode_log(results[sectname]['log'])
153 202
154def ptestresult_get_rawlogs(results):
155 return generic_get_rawlogs('ptestresult.rawlogs', results)
156
157def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False): 203def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
158 for res in results: 204 for res in results:
159 if res: 205 if res:
@@ -163,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
163 os.makedirs(os.path.dirname(dst), exist_ok=True) 209 os.makedirs(os.path.dirname(dst), exist_ok=True)
164 resultsout = results[res] 210 resultsout = results[res]
165 if not ptestjson: 211 if not ptestjson:
166 resultsout = strip_ptestresults(results[res]) 212 resultsout = strip_logs(results[res])
213 trim_durations(resultsout)
214 handle_cleanups(resultsout)
167 with open(dst, 'w') as f: 215 with open(dst, 'w') as f:
168 f.write(json.dumps(resultsout, sort_keys=True, indent=4)) 216 f.write(json.dumps(resultsout, sort_keys=True, indent=1))
169 for res2 in results[res]: 217 for res2 in results[res]:
170 if ptestlogs and 'result' in results[res][res2]: 218 if ptestlogs and 'result' in results[res][res2]:
171 seriesresults = results[res][res2]['result'] 219 seriesresults = results[res][res2]['result']
172 rawlogs = ptestresult_get_rawlogs(seriesresults) 220 for logtype in rawlog_sections:
173 if rawlogs is not None: 221 logdata = generic_get_rawlogs(logtype, seriesresults)
174 with open(dst.replace(fn, "ptest-raw.log"), "w+") as f: 222 if logdata is not None:
175 f.write(rawlogs) 223 logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log")
224 with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f:
225 f.write(logdata)
176 if 'ptestresult.sections' in seriesresults: 226 if 'ptestresult.sections' in seriesresults:
177 for i in seriesresults['ptestresult.sections']: 227 for i in seriesresults['ptestresult.sections']:
178 sectionlog = ptestresult_get_log(seriesresults, i) 228 sectionlog = ptestresult_get_log(seriesresults, i)
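
The duration rounding rule from trim_durations(), shown as a standalone sketch:

    def trim(duration):
        if duration > 1:
            return float("%.3f" % duration)  # 3 decimal places
        elif duration < 1:
            return float("%.4g" % duration)  # 4 significant digits
        return duration

    print(trim(12.3456789))   # 12.346
    print(trim(0.000123456))  # 0.0001235
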
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py
index e0951f0a8f..b143334e69 100644
--- a/scripts/lib/resulttool/store.py
+++ b/scripts/lib/resulttool/store.py
@@ -65,18 +65,35 @@ def store(args, logger):
65 65
66 for r in revisions: 66 for r in revisions:
67 results = revisions[r] 67 results = revisions[r]
68 if args.revision and r[0] != args.revision:
69 logger.info('skipping %s as non-matching' % r[0])
70 continue
68 keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]} 71 keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
69 subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"]) 72 subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"])
70 resultutils.save_resultsdata(results, tempdir, ptestlogs=True) 73 resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
71 74
72 logger.info('Storing test result into git repository %s' % args.git_dir) 75 logger.info('Storing test result into git repository %s' % args.git_dir)
73 76
74 gitarchive.gitarchive(tempdir, args.git_dir, False, False, 77 excludes = []
78 if args.logfile_archive:
79 excludes = ['*.log', "*.log.zst"]
80
81 tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False,
75 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}", 82 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
76 False, "{branch}/{commit_count}-g{commit}/{tag_number}", 83 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
77 'Test run #{tag_number} of {branch}:{commit}', '', 84 'Test run #{tag_number} of {branch}:{commit}', '',
78 [], [], False, keywords, logger) 85 excludes, [], False, keywords, logger)
79 86
87 if args.logfile_archive:
88 logdir = args.logfile_archive + "/" + tagname
89 shutil.copytree(tempdir, logdir)
90 os.chmod(logdir, 0o755)
91 for root, dirs, files in os.walk(logdir):
92 for name in files:
93 if not name.endswith(".log"):
94 continue
95 f = os.path.join(root, name)
96 subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True)
80 finally: 97 finally:
81 subprocess.check_call(["rm", "-rf", tempdir]) 98 subprocess.check_call(["rm", "-rf", tempdir])
82 99
@@ -102,3 +119,7 @@ def register_commands(subparsers):
102 help='add executed-by configuration to each result file') 119 help='add executed-by configuration to each result file')
103 parser_build.add_argument('-t', '--extra-test-env', default='', 120 parser_build.add_argument('-t', '--extra-test-env', default='',
104 help='add extra test environment data to each result file configuration') 121 help='add extra test environment data to each result file configuration')
122 parser_build.add_argument('-r', '--revision', default='',
123 help='only store data for the specified revision')
124 parser_build.add_argument('-l', '--logfile-archive', default='',
125 help='directory to separately archive log files along with a copy of the results')
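
A standalone sketch of the log-compression walk used for --logfile-archive above (the directory is illustrative and the zstd tool must be on PATH):

    import os
    import subprocess

    logdir = "/tmp/archived-results"  # illustrative path
    for root, dirs, files in os.walk(logdir):
        for name in files:
            if name.endswith(".log"):
                # --rm deletes the original file after compression
                subprocess.run(["zstd", os.path.join(root, name), "--rm"],
                               check=True, capture_output=True)
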
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index 3164171eb2..32e749dbb1 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -5,7 +5,6 @@
5# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
6# 6#
7 7
8import argparse
9import glob 8import glob
10import logging 9import logging
11import os 10import os
@@ -18,13 +17,14 @@ import sys
18import tempfile 17import tempfile
19import threading 18import threading
20import importlib 19import importlib
21from importlib import machinery 20import importlib.machinery
21import importlib.util
22 22
23class KeepAliveStreamHandler(logging.StreamHandler): 23class KeepAliveStreamHandler(logging.StreamHandler):
24 def __init__(self, keepalive=True, **kwargs): 24 def __init__(self, keepalive=True, **kwargs):
25 super().__init__(**kwargs) 25 super().__init__(**kwargs)
26 if keepalive is True: 26 if keepalive is True:
27 keepalive = 5000 # default timeout 27 keepalive = 5000 # default timeout
28 self._timeout = threading.Condition() 28 self._timeout = threading.Condition()
29 self._stop = False 29 self._stop = False
30 30
@@ -35,9 +35,9 @@ class KeepAliveStreamHandler(logging.StreamHandler):
35 with self._timeout: 35 with self._timeout:
36 if not self._timeout.wait(keepalive): 36 if not self._timeout.wait(keepalive):
37 self.emit(logging.LogRecord("keepalive", logging.INFO, 37 self.emit(logging.LogRecord("keepalive", logging.INFO,
38 None, None, "Keepalive message", None, None)) 38 None, None, "Keepalive message", None, None))
39 39
40 self._thread = threading.Thread(target = thread, daemon = True) 40 self._thread = threading.Thread(target=thread, daemon=True)
41 self._thread.start() 41 self._thread.start()
42 42
43 def close(self): 43 def close(self):
@@ -71,18 +71,19 @@ def logger_setup_color(logger, color='auto'):
71 71
72 for handler in logger.handlers: 72 for handler in logger.handlers:
73 if (isinstance(handler, logging.StreamHandler) and 73 if (isinstance(handler, logging.StreamHandler) and
74 isinstance(handler.formatter, BBLogFormatter)): 74 isinstance(handler.formatter, BBLogFormatter)):
75 if color == 'always' or (color == 'auto' and handler.stream.isatty()): 75 if color == 'always' or (color == 'auto' and handler.stream.isatty()):
76 handler.formatter.enable_color() 76 handler.formatter.enable_color()
77 77
78 78
79def load_plugins(logger, plugins, pluginpath): 79def load_plugins(logger, plugins, pluginpath):
80
81 def load_plugin(name): 80 def load_plugin(name):
82 logger.debug('Loading plugin %s' % name) 81 logger.debug('Loading plugin %s' % name)
83 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 82 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
84 if spec: 83 if spec:
85 return spec.loader.load_module() 84 mod = importlib.util.module_from_spec(spec)
85 spec.loader.exec_module(mod)
86 return mod
86 87
87 def plugin_name(filename): 88 def plugin_name(filename):
88 return os.path.splitext(os.path.basename(filename))[0] 89 return os.path.splitext(os.path.basename(filename))[0]
@@ -176,9 +177,15 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
176 f.write('BB_STRICT_CHECKSUM = "ignore"\n') 177 f.write('BB_STRICT_CHECKSUM = "ignore"\n')
177 f.write('SRC_URI = "%s"\n' % srcuri) 178 f.write('SRC_URI = "%s"\n' % srcuri)
178 f.write('SRCREV = "%s"\n' % srcrev) 179 f.write('SRCREV = "%s"\n' % srcrev)
180 f.write('PV = "0.0+"\n')
179 f.write('WORKDIR = "%s"\n' % tmpworkdir) 181 f.write('WORKDIR = "%s"\n' % tmpworkdir)
182 f.write('UNPACKDIR = "%s"\n' % destdir)
183
180 # Set S out of the way so it doesn't get created under the workdir 184 # Set S out of the way so it doesn't get created under the workdir
181 f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) 185 s_dir = os.path.join(tmpdir, 'emptysrc')
186 bb.utils.mkdirhier(s_dir)
187 f.write('S = "%s"\n' % s_dir)
188
182 if not mirrors: 189 if not mirrors:
183 # We do not need PREMIRRORS since we are almost certainly 190 # We do not need PREMIRRORS since we are almost certainly
184 # fetching new source rather than something that has already 191 # fetching new source rather than something that has already
@@ -230,10 +237,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
230 if e.errno != errno.ENOTEMPTY: 237 if e.errno != errno.ENOTEMPTY:
231 raise 238 raise
232 239
233 bb.utils.mkdirhier(destdir)
234 for fn in os.listdir(tmpworkdir):
235 shutil.move(os.path.join(tmpworkdir, fn), destdir)
236
237 finally: 240 finally:
238 if not preserve_tmp: 241 if not preserve_tmp:
239 shutil.rmtree(tmpdir) 242 shutil.rmtree(tmpdir)
@@ -269,12 +272,3 @@ def is_src_url(param):
269 return True 272 return True
270 return False 273 return False
271 274
272def filter_src_subdirs(pth):
273 """
274 Filter out subdirectories of initial unpacked source trees that we do not care about.
275 Used by devtool and recipetool.
276 """
277 dirlist = os.listdir(pth)
278 filterout = ['git.indirectionsymlink', 'source-date-epoch']
279 dirlist = [x for x in dirlist if x not in filterout]
280 return dirlist
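
The load_plugins() change above moves off the long-deprecated spec.loader.load_module() (removed in recent Python releases) to the module_from_spec()/exec_module() pattern. A standalone sketch with an illustrative plugin name and directory:

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        # Locate the module on the given path without importing it yet
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod
        return None

    plugin = load_plugin("myplugin", "/path/to/plugins")
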
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc
index 89880b417b..4a440ddafe 100644
--- a/scripts/lib/wic/canned-wks/common.wks.inc
+++ b/scripts/lib/wic/canned-wks/common.wks.inc
@@ -1,3 +1,3 @@
1# This file is included into 3 canned wks files from this directory 1# This file is included into 3 canned wks files from this directory
2part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 2part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
3part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024 3part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
index 8d7d8de6ea..cb640056f1 100644
--- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
@@ -3,7 +3,7 @@
3# can directly dd to boot media. 3# can directly dd to boot media.
4 4
5 5
6part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 6part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid 7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8 8
9bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8" 9bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
diff --git a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
index f61d941d6d..4fd1999ffb 100644
--- a/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-multi-rootfs.wks
@@ -15,7 +15,7 @@
15# 15#
16# - or any combinations of -r and --rootfs command line options 16# - or any combinations of -r and --rootfs command line options
17 17
18part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 18part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
19part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024 19part / --source rootfs --rootfs-dir=rootfs1 --ondisk sda --fstype=ext4 --label platform --align 1024
20part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024 20part /rescue --source rootfs --rootfs-dir=rootfs2 --ondisk sda --fstype=ext4 --label secondary --align 1024
21 21
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
index 7300e65e32..5211972955 100644
--- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
@@ -1,3 +1,3 @@
1bootloader --ptable gpt 1bootloader --ptable gpt
2part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.0 2part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2
3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/ 3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
new file mode 100644
index 0000000000..cac0fa32cd
--- /dev/null
+++ b/scripts/lib/wic/canned-wks/efi-uki-bootdisk.wks.in
@@ -0,0 +1,3 @@
1bootloader --ptable gpt --timeout=5
2part /boot --source bootimg_efi --sourceparams="loader=${EFI_PROVIDER}" --label boot --active --align 1024 --use-uuid --part-name="ESP" --part-type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B --overhead-factor=1
3part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks
index 9f534fe184..16dfe76dfe 100644
--- a/scripts/lib/wic/canned-wks/mkefidisk.wks
+++ b/scripts/lib/wic/canned-wks/mkefidisk.wks
@@ -2,10 +2,10 @@
2# long-description: Creates a partitioned EFI disk image that the user 2# long-description: Creates a partitioned EFI disk image that the user
3# can directly dd to boot media. 3# can directly dd to boot media.
4 4
5part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 5part /boot --source bootimg_efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
6 6
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid 7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8 8
9part swap --ondisk sda --size 44 --label swap1 --fstype=swap 9part swap --ondisk sda --size 44 --label swap1 --fstype=swap
10 10
11bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" 11bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=${KERNEL_CONSOLE} console=tty0"
diff --git a/scripts/lib/wic/canned-wks/mkhybridiso.wks b/scripts/lib/wic/canned-wks/mkhybridiso.wks
index 48c5ac4791..c3a030e5b4 100644
--- a/scripts/lib/wic/canned-wks/mkhybridiso.wks
+++ b/scripts/lib/wic/canned-wks/mkhybridiso.wks
@@ -2,6 +2,6 @@
2# long-description: Creates an EFI and legacy bootable hybrid ISO image 2# long-description: Creates an EFI and legacy bootable hybrid ISO image
3# which can be used on optical media as well as USB media. 3# which can be used on optical media as well as USB media.
4 4
5part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO 5part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi,image_name=HYBRID_ISO_IMG" --ondisk cd --label HYBRIDISO
6 6
7bootloader --timeout=15 --append="" 7bootloader --timeout=15 --append=""
diff --git a/scripts/lib/wic/canned-wks/qemuloongarch.wks b/scripts/lib/wic/canned-wks/qemuloongarch.wks
new file mode 100644
index 0000000000..8465c7a8c0
--- /dev/null
+++ b/scripts/lib/wic/canned-wks/qemuloongarch.wks
@@ -0,0 +1,3 @@
1# short-description: Create qcow2 image for LoongArch QEMU machines
2
3part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index 22b45217f1..808997611a 100644
--- a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -4,5 +4,5 @@
4 4
5include common.wks.inc 5include common.wks.inc
6 6
7bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 " 7bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 console=tty console=ttyS0 "
8 8
diff --git a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
index 63bc4dab6a..f9f8044f7d 100644
--- a/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
+++ b/scripts/lib/wic/canned-wks/sdimage-bootpart.wks
@@ -2,5 +2,5 @@
2# long-description: Creates a partitioned SD card image. Boot files 2# long-description: Creates a partitioned SD card image. Boot files
3# are located in the first vfat partition. 3# are located in the first vfat partition.
4 4
5part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 5part /boot --source bootimg_partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16
6part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4 6part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --label root --align 4
diff --git a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
index 95d7b97a60..3fb2c0e35f 100644
--- a/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
+++ b/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
@@ -2,7 +2,7 @@
2# long-description: Creates a partitioned EFI disk image that the user 2# long-description: Creates a partitioned EFI disk image that the user
3# can directly dd to boot media. The selected bootloader is systemd-boot. 3# can directly dd to boot media. The selected bootloader is systemd-boot.
4 4
5part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid 5part /boot --source bootimg_efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid
6 6
7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid 7part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
8 8
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py
index 674ccfc244..b9e60cbe4e 100644
--- a/scripts/lib/wic/engine.py
+++ b/scripts/lib/wic/engine.py
@@ -180,6 +180,8 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir,
180 os.makedirs(options.outdir) 180 os.makedirs(options.outdir)
181 181
182 pname = options.imager 182 pname = options.imager
183 # Don't support '-' in plugin names
184 pname = pname.replace("-", "_")
183 plugin_class = PluginMgr.get_plugins('imager').get(pname) 185 plugin_class = PluginMgr.get_plugins('imager').get(pname)
184 if not plugin_class: 186 if not plugin_class:
185 raise WicError('Unknown plugin: %s' % pname) 187 raise WicError('Unknown plugin: %s' % pname)
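
With the wic plugin modules renamed from dashes to underscores, the lookup above first normalizes the requested imager name, so command lines and wks files using the old dashed spelling keep resolving. Roughly:

    def normalize_plugin_name(pname):
        # Plugin modules are importable Python names now, so map the old
        # dashed style onto the new one, e.g. bootimg-efi -> bootimg_efi.
        return pname.replace("-", "_")

    assert normalize_plugin_name("bootimg-efi") == "bootimg_efi"
    assert normalize_plugin_name("direct") == "direct"
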
@@ -232,6 +234,16 @@ class Disk:
232 self._psector_size = None 234 self._psector_size = None
233 self._ptable_format = None 235 self._ptable_format = None
234 236
237 # define sector size
238 sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
239 if sector_size_str is not None:
240 try:
241 self.sector_size = int(sector_size_str)
242 except ValueError:
243 self.sector_size = None
244 else:
245 self.sector_size = None
246
235 # find parted 247 # find parted
236 # read paths from $PATH environment variable 248 # read paths from $PATH environment variable
237 # if it fails, use hardcoded paths 249 # if it fails, use hardcoded paths
@@ -258,7 +270,13 @@ class Disk:
258 def get_partitions(self): 270 def get_partitions(self):
259 if self._partitions is None: 271 if self._partitions is None:
260 self._partitions = OrderedDict() 272 self._partitions = OrderedDict()
261 out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath)) 273
274 if self.sector_size is not None:
275 out = exec_cmd("export PARTED_SECTOR_SIZE=%d; %s -sm %s unit B print" % \
276 (self.sector_size, self.parted, self.imagepath), True)
277 else:
278 out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath))
279
262 parttype = namedtuple("Part", "pnum start end size fstype") 280 parttype = namedtuple("Part", "pnum start end size fstype")
263 splitted = out.splitlines() 281 splitted = out.splitlines()
264 # skip over possible errors in exec_cmd output 282 # skip over possible errors in exec_cmd output
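
get_partitions() parses parted's machine-readable output; a standalone sketch of that parsing, assuming parted's usual colon-separated, semicolon-terminated -m format:

    from collections import namedtuple

    Part = namedtuple("Part", "pnum start end size fstype")

    def parse_parted_print(out):
        # Partition lines from "parted -sm <img> unit B print" look like
        # "1:1048576B:106954751B:105906176B:ext4::;" (assumed format).
        parts = []
        for line in out.splitlines():
            fields = line.rstrip(';').split(':')
            if not fields[0].isdigit():
                continue  # skip the "BYT;" header and the disk summary line
            pnum, start, end, size, fstype = fields[:5]
            parts.append(Part(int(pnum), start, end, size, fstype))
        return parts
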
@@ -359,7 +377,7 @@ class Disk:
359 Remove files/dirs and their contents from the partition. 377 Remove files/dirs and their contents from the partition.
360 This only applies to ext* partition. 378 This only applies to ext* partition.
361 """ 379 """
362 abs_path = re.sub('\/\/+', '/', path) 380 abs_path = re.sub(r'\/\/+', '/', path)
363 cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs, 381 cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs,
364 self._get_part_image(pnum), 382 self._get_part_image(pnum),
365 abs_path) 383 abs_path)
diff --git a/scripts/lib/wic/filemap.py b/scripts/lib/wic/filemap.py
index 4d9da28172..85b39d5d74 100644
--- a/scripts/lib/wic/filemap.py
+++ b/scripts/lib/wic/filemap.py
@@ -46,6 +46,13 @@ def get_block_size(file_obj):
46 bsize = stat.st_blksize 46 bsize = stat.st_blksize
47 else: 47 else:
48 raise IOError("Unable to determine block size") 48 raise IOError("Unable to determine block size")
49
50 # The logic in this script only supports a maximum of a 4KB
51 # block size
52 max_block_size = 4 * 1024
53 if bsize > max_block_size:
54 bsize = max_block_size
55
49 return bsize 56 return bsize
50 57
51class ErrorNotSupp(Exception): 58class ErrorNotSupp(Exception):
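
The clamp added above keeps filemap within the granularity its bmap logic supports; the effective block size can be probed like this (a sketch, not the module's API):

    import os

    def effective_block_size(path, max_block_size=4 * 1024):
        # st_blksize is the filesystem's preferred I/O size and can exceed
        # 4 KiB; filemap only handles up to 4 KiB, so larger values are clamped.
        return min(os.stat(path).st_blksize, max_block_size)
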
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
index 4ff7470a6a..2e3061f343 100644
--- a/scripts/lib/wic/help.py
+++ b/scripts/lib/wic/help.py
@@ -544,18 +544,18 @@ DESCRIPTION
544 the --source param given to that partition. For example, if the 544 the --source param given to that partition. For example, if the
545 partition is set up like this: 545 partition is set up like this:
546 546
547 part /boot --source bootimg-pcbios ... 547 part /boot --source bootimg_pcbios ...
548 548
549 then the methods defined as class members of the plugin having the 549 then the methods defined as class members of the plugin having the
550 matching bootimg-pcbios .name class member would be used. 550 matching bootimg_pcbios .name class member would be used.
551 551
552 To be more concrete, here's the plugin definition that would match 552 To be more concrete, here's the plugin definition that would match
553 a '--source bootimg-pcbios' usage, along with an example method 553 a '--source bootimg_pcbios' usage, along with an example method
554 that would be called by the wic implementation when it needed to 554 that would be called by the wic implementation when it needed to
555 invoke an implementation-specific partition-preparation function: 555 invoke an implementation-specific partition-preparation function:
556 556
557 class BootimgPcbiosPlugin(SourcePlugin): 557 class BootimgPcbiosPlugin(SourcePlugin):
558 name = 'bootimg-pcbios' 558 name = 'bootimg_pcbios'
559 559
560 @classmethod 560 @classmethod
561 def do_prepare_partition(self, part, ...) 561 def do_prepare_partition(self, part, ...)
@@ -794,7 +794,7 @@ DESCRIPTION
794 794
 795 Here is the content of test.wks: 795 Here is the content of test.wks:
796 796
797 part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 797 part /boot --source bootimg_pcbios --ondisk sda --label boot --active --align 1024
798 part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024 798 part / --source rootfs --ondisk sda --fstype=ext3 --label platform --align 1024
799 799
800 bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0" 800 bootloader --timeout=0 --append="rootwait rootfstype=ext3 video=vesafb vga=0x318 console=tty0"
@@ -916,6 +916,10 @@ DESCRIPTION
 916 will create an empty partition. The --size parameter has 916 will create an empty partition. The --size parameter has
 917 to be used to specify the size of the empty partition. 917 to be used to specify the size of the empty partition.
918 918
919 --sourceparams: This option is specific to wic. Supply additional
920 parameters to the source plugin in
921 key1=value1,key2 format.
922
919 --ondisk or --ondrive: Forces the partition to be created on 923 --ondisk or --ondrive: Forces the partition to be created on
920 a particular disk. 924 a particular disk.
921 925
@@ -932,6 +936,7 @@ DESCRIPTION
932 squashfs 936 squashfs
933 erofs 937 erofs
934 swap 938 swap
939 none
935 940
936 --fsoptions: Specifies a free-form string of options to be 941 --fsoptions: Specifies a free-form string of options to be
937 used when mounting the filesystem. This string 942 used when mounting the filesystem. This string
@@ -940,6 +945,12 @@ DESCRIPTION
940 quotes. If not specified, the default string is 945 quotes. If not specified, the default string is
941 "defaults". 946 "defaults".
942 947
948 --fspassno: Specifies the order in which filesystem checks are done
949 at boot time by fsck. See fs_passno parameter of
950 fstab(5). This parameter will be copied into the
951 /etc/fstab file of the installed system. If not
 952 specified, the default value of "0" will be used.
953
943 --label label: Specifies the label to give to the filesystem 954 --label label: Specifies the label to give to the filesystem
944 to be made on the partition. If the given 955 to be made on the partition. If the given
945 label is already in use by another filesystem, 956 label is already in use by another filesystem,
@@ -959,6 +970,14 @@ DESCRIPTION
959 to start a partition on an x KBytes 970 to start a partition on an x KBytes
960 boundary. 971 boundary.
961 972
 973 --offset: This option is specific to wic. It places the partition
974 at exactly the specified offset. If the partition cannot be
975 placed at the specified offset, the image build will fail.
976 Specify as an integer value optionally followed by one of the
977 units s/S for 512 byte sector, k/K for kibibyte, M for
978 mebibyte and G for gibibyte. The default unit if none is
979 given is k.
980
962 --no-table: This option is specific to wic. Space will be 981 --no-table: This option is specific to wic. Space will be
963 reserved for the partition and it will be 982 reserved for the partition and it will be
964 populated but it will not be added to the 983 populated but it will not be added to the
@@ -1039,6 +1058,18 @@ DESCRIPTION
1039 not take effect when --mkfs-extraopts is used. This should be taken into 1058 not take effect when --mkfs-extraopts is used. This should be taken into
1040 account when using --mkfs-extraopts. 1059 account when using --mkfs-extraopts.
1041 1060
1061 --type: This option is specific to wic. Valid values are 'primary',
1062 'logical'. For msdos partition tables, this option specifies
1063 the partition type.
1064
 1065 --hidden: This option is specific to wic. It sets the
1066 RequiredPartition bit (bit 0) on GPT partitions.
1067
 1068 --mbr: This option is specific to wic. It is used with the
 1069 gpt-hybrid partition table, which uses both a GPT and
1070 an MBR header. Partitions with this flag will be included in
1071 this MBR header.
1072
1042 * bootloader 1073 * bootloader
1043 1074
1044 This command allows the user to specify various bootloader 1075 This command allows the user to specify various bootloader
@@ -1057,6 +1088,13 @@ DESCRIPTION
1057 file. Using this option will override any other 1088 file. Using this option will override any other
1058 bootloader option. 1089 bootloader option.
1059 1090
1091 --ptable: Specifies the partition table format. Valid values are
1092 'msdos', 'gpt', 'gpt-hybrid'.
1093
1094 --source: Specifies the source plugin. If not specified, the
1095 --source value will be copied from the partition that has
1096 /boot as mountpoint.
1097
1060 Note that bootloader functionality and boot partitions are 1098 Note that bootloader functionality and boot partitions are
1061 implemented by the various --source plugins that implement 1099 implemented by the various --source plugins that implement
1062 bootloader functionality; the bootloader command essentially 1100 bootloader functionality; the bootloader command essentially
@@ -1112,7 +1150,7 @@ COMMAND:
1112TOPIC: 1150TOPIC:
1113 overview - Presents an overall overview of Wic 1151 overview - Presents an overall overview of Wic
1114 plugins - Presents an overview and API for Wic plugins 1152 plugins - Presents an overview and API for Wic plugins
1115 kickstart - Presents a Wic kicstart file reference 1153 kickstart - Presents a Wic kickstart file reference
1116 1154
1117 1155
1118Examples: 1156Examples:
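
The new --offset text documents suffixes s/S, k/K, M and G, with k as the default; an illustrative parser for those semantics (not wic's actual implementation):

    def parse_offset(value, sector_size=512):
        # Return the offset in bytes. Per the help text: s/S = sectors,
        # k/K = KiB, M = MiB, G = GiB; a bare integer means KiB.
        units = {'s': sector_size, 'S': sector_size,
                 'k': 1024, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3}
        if value and value[-1] in units:
            return int(value[:-1]) * units[value[-1]]
        return int(value) * 1024  # default unit is k

    assert parse_offset("8M") == 8 * 1024 * 1024
    assert parse_offset("34s") == 34 * 512
    assert parse_offset("512") == 512 * 1024
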
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py
index 0df9eb0d05..7ef3dc83dd 100644
--- a/scripts/lib/wic/ksparser.py
+++ b/scripts/lib/wic/ksparser.py
@@ -155,10 +155,11 @@ class KickStart():
155 part.add_argument('--change-directory') 155 part.add_argument('--change-directory')
156 part.add_argument("--extra-space", type=sizetype("M")) 156 part.add_argument("--extra-space", type=sizetype("M"))
157 part.add_argument('--fsoptions', dest='fsopts') 157 part.add_argument('--fsoptions', dest='fsopts')
158 part.add_argument('--fspassno', dest='fspassno')
158 part.add_argument('--fstype', default='vfat', 159 part.add_argument('--fstype', default='vfat',
159 choices=('ext2', 'ext3', 'ext4', 'btrfs', 160 choices=('ext2', 'ext3', 'ext4', 'btrfs',
160 'squashfs', 'vfat', 'msdos', 'erofs', 161 'squashfs', 'vfat', 'msdos', 'erofs',
161 'swap')) 162 'swap', 'none'))
162 part.add_argument('--mkfs-extraopts', default='') 163 part.add_argument('--mkfs-extraopts', default='')
163 part.add_argument('--label') 164 part.add_argument('--label')
164 part.add_argument('--use-label', action='store_true') 165 part.add_argument('--use-label', action='store_true')
@@ -170,6 +171,7 @@ class KickStart():
170 part.add_argument('--rootfs-dir') 171 part.add_argument('--rootfs-dir')
171 part.add_argument('--type', default='primary', 172 part.add_argument('--type', default='primary',
172 choices = ('primary', 'logical')) 173 choices = ('primary', 'logical'))
174 part.add_argument('--hidden', action='store_true')
173 175
174 # --size and --fixed-size cannot be specified together; options 176 # --size and --fixed-size cannot be specified together; options
 175 # --extra-space and --overhead-factor should also raise a parser 177 # --extra-space and --overhead-factor should also raise a parser
@@ -186,11 +188,12 @@ class KickStart():
186 part.add_argument('--uuid') 188 part.add_argument('--uuid')
187 part.add_argument('--fsuuid') 189 part.add_argument('--fsuuid')
188 part.add_argument('--no-fstab-update', action='store_true') 190 part.add_argument('--no-fstab-update', action='store_true')
191 part.add_argument('--mbr', action='store_true')
189 192
190 bootloader = subparsers.add_parser('bootloader') 193 bootloader = subparsers.add_parser('bootloader')
191 bootloader.add_argument('--append') 194 bootloader.add_argument('--append')
192 bootloader.add_argument('--configfile') 195 bootloader.add_argument('--configfile')
193 bootloader.add_argument('--ptable', choices=('msdos', 'gpt'), 196 bootloader.add_argument('--ptable', choices=('msdos', 'gpt', 'gpt-hybrid'),
194 default='msdos') 197 default='msdos')
195 bootloader.add_argument('--timeout', type=int) 198 bootloader.add_argument('--timeout', type=int)
196 bootloader.add_argument('--source') 199 bootloader.add_argument('--source')
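
The parser changes above extend the part and bootloader subcommands; a self-contained sketch of the same argparse structure:

    import argparse

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='command')

    part = subparsers.add_parser('part')
    part.add_argument('--fspassno')                     # new: fsck pass number
    part.add_argument('--fstype', default='vfat',
                      choices=('ext2', 'ext3', 'ext4', 'btrfs', 'squashfs',
                               'vfat', 'msdos', 'erofs', 'swap', 'none'))
    part.add_argument('--hidden', action='store_true')  # new: GPT RequiredPartition
    part.add_argument('--mbr', action='store_true')     # new: include in hybrid MBR

    bootloader = subparsers.add_parser('bootloader')
    bootloader.add_argument('--ptable', default='msdos',
                            choices=('msdos', 'gpt', 'gpt-hybrid'))

    args = parser.parse_args(['part', '--fstype', 'none', '--hidden'])
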
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py
index 3e11822996..1a7c140fa6 100644
--- a/scripts/lib/wic/misc.py
+++ b/scripts/lib/wic/misc.py
@@ -25,7 +25,7 @@ from wic import WicError
25logger = logging.getLogger('wic') 25logger = logging.getLogger('wic')
26 26
27# executable -> recipe pairs for exec_native_cmd 27# executable -> recipe pairs for exec_native_cmd
28NATIVE_RECIPES = {"bmaptool": "bmap-tools", 28NATIVE_RECIPES = {"bmaptool": "bmaptool",
29 "dumpe2fs": "e2fsprogs", 29 "dumpe2fs": "e2fsprogs",
30 "grub-mkimage": "grub-efi", 30 "grub-mkimage": "grub-efi",
31 "isohybrid": "syslinux", 31 "isohybrid": "syslinux",
@@ -36,6 +36,7 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools",
36 "mkdosfs": "dosfstools", 36 "mkdosfs": "dosfstools",
37 "mkisofs": "cdrtools", 37 "mkisofs": "cdrtools",
38 "mkfs.btrfs": "btrfs-tools", 38 "mkfs.btrfs": "btrfs-tools",
39 "mkfs.erofs": "erofs-utils",
39 "mkfs.ext2": "e2fsprogs", 40 "mkfs.ext2": "e2fsprogs",
40 "mkfs.ext3": "e2fsprogs", 41 "mkfs.ext3": "e2fsprogs",
41 "mkfs.ext4": "e2fsprogs", 42 "mkfs.ext4": "e2fsprogs",
@@ -140,11 +141,12 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
140 cmd_and_args = pseudo + cmd_and_args 141 cmd_and_args = pseudo + cmd_and_args
141 142
142 hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR") 143 hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR")
144 target_sys = get_bitbake_var("TARGET_SYS")
143 145
144 native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/bin:%s" % \ 146 native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/usr/bin/%s:%s/bin:%s" % \
145 (native_sysroot, native_sysroot, 147 (native_sysroot, native_sysroot,
146 native_sysroot, native_sysroot, 148 native_sysroot, native_sysroot, target_sys,
147 hosttools_dir) 149 native_sysroot, hosttools_dir)
148 150
149 native_cmd_and_args = "export PATH=%s:$PATH;%s" % \ 151 native_cmd_and_args = "export PATH=%s:$PATH;%s" % \
150 (native_paths, cmd_and_args) 152 (native_paths, cmd_and_args)
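
NATIVE_RECIPES maps each executable to the recipe providing it, so a missing tool can be turned into an actionable hint; roughly how exec_native_cmd uses it (message wording is illustrative):

    NATIVE_RECIPES = {"bmaptool": "bmaptool",
                      "mkfs.erofs": "erofs-utils",
                      "mkfs.ext4": "e2fsprogs"}

    def missing_tool_hint(executable):
        # Suggest which -native recipe to build when a tool is absent.
        recipe = NATIVE_RECIPES.get(executable)
        if recipe:
            return "%s not found; try 'bitbake %s-native'" % (executable, recipe)
        return "%s not found in the native sysroot or host PATH" % executable
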
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
index a25834048e..b34691d313 100644
--- a/scripts/lib/wic/partition.py
+++ b/scripts/lib/wic/partition.py
@@ -33,6 +33,7 @@ class Partition():
33 self.include_path = args.include_path 33 self.include_path = args.include_path
34 self.change_directory = args.change_directory 34 self.change_directory = args.change_directory
35 self.fsopts = args.fsopts 35 self.fsopts = args.fsopts
36 self.fspassno = args.fspassno
36 self.fstype = args.fstype 37 self.fstype = args.fstype
37 self.label = args.label 38 self.label = args.label
38 self.use_label = args.use_label 39 self.use_label = args.use_label
@@ -58,6 +59,8 @@ class Partition():
58 self.updated_fstab_path = None 59 self.updated_fstab_path = None
59 self.has_fstab = False 60 self.has_fstab = False
60 self.update_fstab_in_rootfs = False 61 self.update_fstab_in_rootfs = False
62 self.hidden = args.hidden
63 self.mbr = args.mbr
61 64
62 self.lineno = lineno 65 self.lineno = lineno
63 self.source_file = "" 66 self.source_file = ""
@@ -132,6 +135,8 @@ class Partition():
132 self.update_fstab_in_rootfs = True 135 self.update_fstab_in_rootfs = True
133 136
134 if not self.source: 137 if not self.source:
138 if self.fstype == "none" or self.no_table:
139 return
135 if not self.size and not self.fixed_size: 140 if not self.size and not self.fixed_size:
136 raise WicError("The %s partition has a size of zero. Please " 141 raise WicError("The %s partition has a size of zero. Please "
137 "specify a non-zero --size/--fixed-size for that " 142 "specify a non-zero --size/--fixed-size for that "
@@ -159,6 +164,9 @@ class Partition():
159 164
160 plugins = PluginMgr.get_plugins('source') 165 plugins = PluginMgr.get_plugins('source')
161 166
167 # Don't support '-' in plugin names
168 self.source = self.source.replace("-", "_")
169
162 if self.source not in plugins: 170 if self.source not in plugins:
163 raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" 171 raise WicError("The '%s' --source specified for %s doesn't exist.\n\t"
164 "See 'wic list source-plugins' for a list of available" 172 "See 'wic list source-plugins' for a list of available"
@@ -171,9 +179,9 @@ class Partition():
171 # Split sourceparams string of the form key1=val1[,key2=val2,...] 179 # Split sourceparams string of the form key1=val1[,key2=val2,...]
172 # into a dict. Also accepts valueless keys i.e. without = 180 # into a dict. Also accepts valueless keys i.e. without =
173 splitted = self.sourceparams.split(',') 181 splitted = self.sourceparams.split(',')
174 srcparams_dict = dict(par.split('=', 1) for par in splitted if par) 182 srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
175 183
176 plugin = PluginMgr.get_plugins('source')[self.source] 184 plugin = plugins[self.source]
177 plugin.do_configure_partition(self, srcparams_dict, creator, 185 plugin.do_configure_partition(self, srcparams_dict, creator,
178 cr_workdir, oe_builddir, bootimg_dir, 186 cr_workdir, oe_builddir, bootimg_dir,
179 kernel_dir, native_sysroot) 187 kernel_dir, native_sysroot)
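
The one-line change above makes valueless --sourceparams keys parse to None instead of raising; for example:

    def parse_sourceparams(sourceparams):
        # "key1=val1,key2" -> {'key1': 'val1', 'key2': None}; padding the
        # split result with [None] tolerates keys given without '='.
        splitted = sourceparams.split(',')
        return dict((par.split('=', 1) + [None])[:2] for par in splitted if par)

    assert parse_sourceparams("loader=systemd-boot,novalue") == \
        {'loader': 'systemd-boot', 'novalue': None}
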
@@ -217,19 +225,19 @@ class Partition():
217 if (pseudo_dir): 225 if (pseudo_dir):
218 # Canonicalize the ignore paths. This corresponds to 226 # Canonicalize the ignore paths. This corresponds to
219 # calling oe.path.canonicalize(), which is used in bitbake.conf. 227 # calling oe.path.canonicalize(), which is used in bitbake.conf.
220 ignore_paths = [rootfs] + (get_bitbake_var("PSEUDO_IGNORE_PATHS") or "").split(",") 228 include_paths = [rootfs_dir] + (get_bitbake_var("PSEUDO_INCLUDE_PATHS") or "").split(",")
221 canonical_paths = [] 229 canonical_paths = []
222 for path in ignore_paths: 230 for path in include_paths:
223 if "$" not in path: 231 if "$" not in path:
224 trailing_slash = path.endswith("/") and "/" or "" 232 trailing_slash = path.endswith("/") and "/" or ""
225 canonical_paths.append(os.path.realpath(path) + trailing_slash) 233 canonical_paths.append(os.path.realpath(path) + trailing_slash)
226 ignore_paths = ",".join(canonical_paths) 234 include_paths = ",".join(canonical_paths)
227 235
228 pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix 236 pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
229 pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir 237 pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
230 pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir 238 pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir
231 pseudo += "export PSEUDO_NOSYMLINKEXP=1;" 239 pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
232 pseudo += "export PSEUDO_IGNORE_PATHS=%s;" % ignore_paths 240 pseudo += "export PSEUDO_INCLUDE_PATHS=%s;" % include_paths
233 pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") 241 pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
234 else: 242 else:
235 pseudo = None 243 pseudo = None
@@ -239,7 +247,7 @@ class Partition():
239 # from bitbake variable 247 # from bitbake variable
240 rsize_bb = get_bitbake_var('ROOTFS_SIZE') 248 rsize_bb = get_bitbake_var('ROOTFS_SIZE')
241 rdir = get_bitbake_var('IMAGE_ROOTFS') 249 rdir = get_bitbake_var('IMAGE_ROOTFS')
242 if rsize_bb and rdir == rootfs_dir: 250 if rsize_bb and (rdir == rootfs_dir or (rootfs_dir.split('/')[-2] == "tmp-wic" and rootfs_dir.split('/')[-1][:6] == "rootfs")):
243 # Bitbake variable ROOTFS_SIZE is calculated in 251 # Bitbake variable ROOTFS_SIZE is calculated in
244 # Image._get_rootfs_size method from meta/lib/oe/image.py 252 # Image._get_rootfs_size method from meta/lib/oe/image.py
245 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, 253 # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
@@ -279,6 +287,9 @@ class Partition():
279 287
280 extraopts = self.mkfs_extraopts or "-F -i 8192" 288 extraopts = self.mkfs_extraopts or "-F -i 8192"
281 289
290 # use hash_seed to generate reproducible ext4 images
291 (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, pseudo)
292
282 label_str = "" 293 label_str = ""
283 if self.label: 294 if self.label:
284 label_str = "-L %s" % self.label 295 label_str = "-L %s" % self.label
@@ -299,8 +310,49 @@ class Partition():
299 mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs) 310 mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs)
300 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) 311 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
301 312
313 if os.getenv('SOURCE_DATE_EPOCH'):
314 sde_time = hex(int(os.getenv('SOURCE_DATE_EPOCH')))
315 debugfs_script_path = os.path.join(cr_workdir, "debugfs_script")
316 files = []
317 for root, dirs, others in os.walk(rootfs_dir):
318 base = root.replace(rootfs_dir, "").rstrip(os.sep)
319 files += [ "/" if base == "" else base ]
320 files += [ base + "/" + n for n in dirs + others ]
321 with open(debugfs_script_path, "w") as f:
322 f.write("set_current_time %s\n" % (sde_time))
323 if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
324 f.write("set_inode_field /etc/fstab mtime %s\n" % (sde_time))
325 f.write("set_inode_field /etc/fstab mtime_extra 0\n")
326 for file in set(files):
327 for time in ["atime", "ctime", "crtime"]:
328 f.write("set_inode_field \"%s\" %s %s\n" % (file, time, sde_time))
329 f.write("set_inode_field \"%s\" %s_extra 0\n" % (file, time))
330 for time in ["wtime", "mkfs_time", "lastcheck"]:
331 f.write("set_super_value %s %s\n" % (time, sde_time))
332 for time in ["mtime", "first_error_time", "last_error_time"]:
333 f.write("set_super_value %s 0\n" % (time))
334 debugfs_cmd = "debugfs -w -f %s %s" % (debugfs_script_path, rootfs)
335 exec_native_cmd(debugfs_cmd, native_sysroot)
336
302 self.check_for_Y2038_problem(rootfs, native_sysroot) 337 self.check_for_Y2038_problem(rootfs, native_sysroot)
303 338
339 def get_hash_seed_ext4(self, extraopts, pseudo):
340 if os.getenv('SOURCE_DATE_EPOCH'):
341 sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
342 if pseudo:
343 pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
344 else:
345 pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
346
347 # Set hash_seed to generate deterministic directory indexes
348 namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
349 if self.fsuuid:
350 namespace = uuid.UUID(self.fsuuid)
351 hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
352 extraopts += " -E hash_seed=%s" % hash_seed
353
354 return (extraopts, pseudo)
355
304 def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, 356 def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
305 native_sysroot, pseudo): 357 native_sysroot, pseudo):
306 """ 358 """
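
get_hash_seed_ext4() pins e2fsprogs timestamps via E2FSPROGS_FAKE_TIME and derives the ext4 directory-index hash seed deterministically, so identical inputs yield identical images. The seed derivation in isolation:

    import uuid

    WIC_NAMESPACE = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")

    def hash_seed_for(sde_time, fsuuid=None):
        # Same SOURCE_DATE_EPOCH (and fsuuid) -> same seed -> stable
        # ext4 directory indexes across rebuilds.
        namespace = uuid.UUID(fsuuid) if fsuuid else WIC_NAMESPACE
        return str(uuid.uuid5(namespace, str(sde_time)))

    assert hash_seed_for(1700000000) == hash_seed_for(1700000000)
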
@@ -352,7 +404,7 @@ class Partition():
352 exec_native_cmd(mcopy_cmd, native_sysroot) 404 exec_native_cmd(mcopy_cmd, native_sysroot)
353 405
354 if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update: 406 if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
355 mcopy_cmd = "mcopy -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path) 407 mcopy_cmd = "mcopy -m -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path)
356 exec_native_cmd(mcopy_cmd, native_sysroot) 408 exec_native_cmd(mcopy_cmd, native_sysroot)
357 409
358 chmod_cmd = "chmod 644 %s" % rootfs 410 chmod_cmd = "chmod 644 %s" % rootfs
@@ -380,6 +432,9 @@ class Partition():
380 (extraopts, self.fsuuid, rootfs, rootfs_dir) 432 (extraopts, self.fsuuid, rootfs, rootfs_dir)
381 exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo) 433 exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo)
382 434
435 def prepare_empty_partition_none(self, rootfs, oe_builddir, native_sysroot):
436 pass
437
383 def prepare_empty_partition_ext(self, rootfs, oe_builddir, 438 def prepare_empty_partition_ext(self, rootfs, oe_builddir,
384 native_sysroot): 439 native_sysroot):
385 """ 440 """
@@ -391,13 +446,16 @@ class Partition():
391 446
392 extraopts = self.mkfs_extraopts or "-i 8192" 447 extraopts = self.mkfs_extraopts or "-i 8192"
393 448
449 # use hash_seed to generate reproducible ext4 images
450 (extraopts, pseudo) = self.get_hash_seed_ext4(extraopts, None)
451
394 label_str = "" 452 label_str = ""
395 if self.label: 453 if self.label:
396 label_str = "-L %s" % self.label 454 label_str = "-L %s" % self.label
397 455
398 mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \ 456 mkfs_cmd = "mkfs.%s -F %s %s -U %s %s" % \
399 (self.fstype, extraopts, label_str, self.fsuuid, rootfs) 457 (self.fstype, extraopts, label_str, self.fsuuid, rootfs)
400 exec_native_cmd(mkfs_cmd, native_sysroot) 458 exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
401 459
402 self.check_for_Y2038_problem(rootfs, native_sysroot) 460 self.check_for_Y2038_problem(rootfs, native_sysroot)
403 461
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
index d9b4e57747..640da292d3 100644
--- a/scripts/lib/wic/pluginbase.py
+++ b/scripts/lib/wic/pluginbase.py
@@ -9,9 +9,11 @@ __all__ = ['ImagerPlugin', 'SourcePlugin']
9 9
10import os 10import os
11import logging 11import logging
12import types
12 13
13from collections import defaultdict 14from collections import defaultdict
14from importlib.machinery import SourceFileLoader 15import importlib
16import importlib.util
15 17
16from wic import WicError 18from wic import WicError
17from wic.misc import get_bitbake_var 19from wic.misc import get_bitbake_var
@@ -42,7 +44,7 @@ class PluginMgr:
42 path = os.path.join(layer_path, script_plugin_dir) 44 path = os.path.join(layer_path, script_plugin_dir)
43 path = os.path.abspath(os.path.expanduser(path)) 45 path = os.path.abspath(os.path.expanduser(path))
44 if path not in cls._plugin_dirs and os.path.isdir(path): 46 if path not in cls._plugin_dirs and os.path.isdir(path):
45 cls._plugin_dirs.insert(0, path) 47 cls._plugin_dirs.append(path)
46 48
47 if ptype not in PLUGINS: 49 if ptype not in PLUGINS:
48 # load all ptype plugins 50 # load all ptype plugins
@@ -54,7 +56,9 @@ class PluginMgr:
54 mname = fname[:-3] 56 mname = fname[:-3]
55 mpath = os.path.join(ppath, fname) 57 mpath = os.path.join(ppath, fname)
56 logger.debug("loading plugin module %s", mpath) 58 logger.debug("loading plugin module %s", mpath)
57 SourceFileLoader(mname, mpath).load_module() 59 spec = importlib.util.spec_from_file_location(mname, mpath)
60 module = importlib.util.module_from_spec(spec)
61 spec.loader.exec_module(module)
58 62
59 return PLUGINS.get(ptype) 63 return PLUGINS.get(ptype)
60 64
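
pluginbase now loads each plugin file through the spec API instead of the deprecated SourceFileLoader(...).load_module(); the sequence in isolation:

    import importlib.util

    def load_module_from_file(mname, mpath):
        # Build a spec for an explicit file path and execute it into a
        # new module object; wic plugins register themselves as a side
        # effect of being imported.
        spec = importlib.util.spec_from_file_location(mname, mpath)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
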
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
index 35fff7c102..6e1f1c8cba 100644
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ b/scripts/lib/wic/plugins/imager/direct.py
@@ -117,7 +117,7 @@ class DirectPlugin(ImagerPlugin):
117 updated = False 117 updated = False
118 for part in self.parts: 118 for part in self.parts:
119 if not part.realnum or not part.mountpoint \ 119 if not part.realnum or not part.mountpoint \
120 or part.mountpoint == "/" or not part.mountpoint.startswith('/'): 120 or part.mountpoint == "/" or not (part.mountpoint.startswith('/') or part.mountpoint == "swap"):
121 continue 121 continue
122 122
123 if part.use_uuid: 123 if part.use_uuid:
@@ -138,8 +138,9 @@ class DirectPlugin(ImagerPlugin):
138 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum) 138 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum)
139 139
140 opts = part.fsopts if part.fsopts else "defaults" 140 opts = part.fsopts if part.fsopts else "defaults"
141 passno = part.fspassno if part.fspassno else "0"
141 line = "\t".join([device_name, part.mountpoint, part.fstype, 142 line = "\t".join([device_name, part.mountpoint, part.fstype,
142 opts, "0", "0"]) + "\n" 143 opts, "0", passno]) + "\n"
143 144
144 fstab_lines.append(line) 145 fstab_lines.append(line)
145 updated = True 146 updated = True
@@ -148,6 +149,9 @@ class DirectPlugin(ImagerPlugin):
148 self.updated_fstab_path = os.path.join(self.workdir, "fstab") 149 self.updated_fstab_path = os.path.join(self.workdir, "fstab")
149 with open(self.updated_fstab_path, "w") as f: 150 with open(self.updated_fstab_path, "w") as f:
150 f.writelines(fstab_lines) 151 f.writelines(fstab_lines)
152 if os.getenv('SOURCE_DATE_EPOCH'):
153 fstab_time = int(os.getenv('SOURCE_DATE_EPOCH'))
154 os.utime(self.updated_fstab_path, (fstab_time, fstab_time))
151 155
152 def _full_path(self, path, name, extention): 156 def _full_path(self, path, name, extention):
153 """ Construct full file path to a file we generate. """ 157 """ Construct full file path to a file we generate. """
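
The fstab rewrite above now honours --fspassno (the sixth fstab field) and clamps the generated file's timestamps so image content does not depend on build time; a condensed sketch:

    import os

    def write_fstab(path, fstab_lines):
        with open(path, "w") as f:
            f.writelines(fstab_lines)
        sde = os.getenv('SOURCE_DATE_EPOCH')
        if sde:
            # Reproducible builds: pin atime/mtime to the source epoch.
            os.utime(path, (int(sde), int(sde)))

    # Fields: device, mountpoint, fstype, options, dump, fsck pass number.
    line = "\t".join(["/dev/sda2", "/", "ext4", "defaults", "0", "1"]) + "\n"
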
@@ -199,6 +203,8 @@ class DirectPlugin(ImagerPlugin):
199 source_plugin = self.ks.bootloader.source 203 source_plugin = self.ks.bootloader.source
200 disk_name = self.parts[0].disk 204 disk_name = self.parts[0].disk
201 if source_plugin: 205 if source_plugin:
206 # Don't support '-' in plugin names
207 source_plugin = source_plugin.replace("-", "_")
202 plugin = PluginMgr.get_plugins('source')[source_plugin] 208 plugin = PluginMgr.get_plugins('source')[source_plugin]
203 plugin.do_install_disk(self._image, disk_name, self, self.workdir, 209 plugin.do_install_disk(self._image, disk_name, self, self.workdir,
204 self.oe_builddir, self.bootimg_dir, 210 self.oe_builddir, self.bootimg_dir,
@@ -259,7 +265,7 @@ class DirectPlugin(ImagerPlugin):
259 if part.mountpoint == "/": 265 if part.mountpoint == "/":
260 if part.uuid: 266 if part.uuid:
261 return "PARTUUID=%s" % part.uuid 267 return "PARTUUID=%s" % part.uuid
262 elif part.label: 268 elif part.label and self.ptable_format != 'msdos':
263 return "PARTLABEL=%s" % part.label 269 return "PARTLABEL=%s" % part.label
264 else: 270 else:
265 suffix = 'p' if part.disk.startswith('mmcblk') else '' 271 suffix = 'p' if part.disk.startswith('mmcblk') else ''
@@ -309,12 +315,23 @@ class PartitionedImage():
309 # all partitions (in bytes) 315 # all partitions (in bytes)
310 self.ptable_format = ptable_format # Partition table format 316 self.ptable_format = ptable_format # Partition table format
311 # Disk system identifier 317 # Disk system identifier
312 self.identifier = random.SystemRandom().randint(1, 0xffffffff) 318 if os.getenv('SOURCE_DATE_EPOCH'):
319 self.identifier = random.Random(int(os.getenv('SOURCE_DATE_EPOCH'))).randint(1, 0xffffffff)
320 else:
321 self.identifier = random.SystemRandom().randint(1, 0xffffffff)
313 322
314 self.partitions = partitions 323 self.partitions = partitions
315 self.partimages = [] 324 self.partimages = []
316 # Size of a sector used in calculations 325 # Size of a sector used in calculations
317 self.sector_size = SECTOR_SIZE 326 sector_size_str = get_bitbake_var('WIC_SECTOR_SIZE')
327 if sector_size_str is not None:
328 try:
329 self.sector_size = int(sector_size_str)
330 except ValueError:
331 self.sector_size = SECTOR_SIZE
332 else:
333 self.sector_size = SECTOR_SIZE
334
318 self.native_sysroot = native_sysroot 335 self.native_sysroot = native_sysroot
319 num_real_partitions = len([p for p in self.partitions if not p.no_table]) 336 num_real_partitions = len([p for p in self.partitions if not p.no_table])
320 self.extra_space = extra_space 337 self.extra_space = extra_space
@@ -335,7 +352,7 @@ class PartitionedImage():
 335 # generate partition and filesystem UUIDs 352 # generate partition and filesystem UUIDs
336 for part in self.partitions: 353 for part in self.partitions:
337 if not part.uuid and part.use_uuid: 354 if not part.uuid and part.use_uuid:
338 if self.ptable_format == 'gpt': 355 if self.ptable_format in ('gpt', 'gpt-hybrid'):
339 part.uuid = str(uuid.uuid4()) 356 part.uuid = str(uuid.uuid4())
340 else: # msdos partition table 357 else: # msdos partition table
341 part.uuid = '%08x-%02d' % (self.identifier, part.realnum) 358 part.uuid = '%08x-%02d' % (self.identifier, part.realnum)
@@ -391,6 +408,10 @@ class PartitionedImage():
391 raise WicError("setting custom partition type is not " \ 408 raise WicError("setting custom partition type is not " \
392 "implemented for msdos partitions") 409 "implemented for msdos partitions")
393 410
411 if part.mbr and self.ptable_format != 'gpt-hybrid':
412 raise WicError("Partition may only be included in MBR with " \
413 "a gpt-hybrid partition table")
414
394 # Get the disk where the partition is located 415 # Get the disk where the partition is located
395 self.numpart += 1 416 self.numpart += 1
396 if not part.no_table: 417 if not part.no_table:
@@ -399,7 +420,7 @@ class PartitionedImage():
399 if self.numpart == 1: 420 if self.numpart == 1:
400 if self.ptable_format == "msdos": 421 if self.ptable_format == "msdos":
401 overhead = MBR_OVERHEAD 422 overhead = MBR_OVERHEAD
402 elif self.ptable_format == "gpt": 423 elif self.ptable_format in ("gpt", "gpt-hybrid"):
403 overhead = GPT_OVERHEAD 424 overhead = GPT_OVERHEAD
404 425
405 # Skip one sector required for the partitioning scheme overhead 426 # Skip one sector required for the partitioning scheme overhead
@@ -483,7 +504,7 @@ class PartitionedImage():
 483 # Once all the partitions have been laid out, we can calculate the 504 # Once all the partitions have been laid out, we can calculate the
 484 # minimum disk size 505 # minimum disk size
485 self.min_size = self.offset 506 self.min_size = self.offset
486 if self.ptable_format == "gpt": 507 if self.ptable_format in ("gpt", "gpt-hybrid"):
487 self.min_size += GPT_OVERHEAD 508 self.min_size += GPT_OVERHEAD
488 509
489 self.min_size *= self.sector_size 510 self.min_size *= self.sector_size
@@ -497,29 +518,58 @@ class PartitionedImage():
497 logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", 518 logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors",
498 parttype, start, end, size) 519 parttype, start, end, size)
499 520
500 cmd = "parted -s %s unit s mkpart %s" % (device, parttype) 521 cmd = "export PARTED_SECTOR_SIZE=%d; parted -s %s unit s mkpart %s" % \
522 (self.sector_size, device, parttype)
501 if fstype: 523 if fstype:
502 cmd += " %s" % fstype 524 cmd += " %s" % fstype
503 cmd += " %d %d" % (start, end) 525 cmd += " %d %d" % (start, end)
504 526
505 return exec_native_cmd(cmd, self.native_sysroot) 527 return exec_native_cmd(cmd, self.native_sysroot)
506 528
529 def _write_identifier(self, device, identifier):
530 logger.debug("Set disk identifier %x", identifier)
531 with open(device, 'r+b') as img:
532 img.seek(0x1B8)
533 img.write(identifier.to_bytes(4, 'little'))
534
535 def _make_disk(self, device, ptable_format, min_size):
536 logger.debug("Creating sparse file %s", device)
537 with open(device, 'w') as sparse:
538 os.ftruncate(sparse.fileno(), min_size)
539
540 logger.debug("Initializing partition table for %s", device)
541 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s mklabel %s" %
542 (self.sector_size, device, ptable_format), self.native_sysroot)
543
544 def _write_disk_guid(self):
545 if self.ptable_format in ('gpt', 'gpt-hybrid'):
546 if os.getenv('SOURCE_DATE_EPOCH'):
547 self.disk_guid = uuid.UUID(int=int(os.getenv('SOURCE_DATE_EPOCH')))
548 else:
549 self.disk_guid = uuid.uuid4()
550
551 logger.debug("Set disk guid %s", self.disk_guid)
552 sfdisk_cmd = "sfdisk --sector-size %s --disk-id %s %s" % \
553 (self.sector_size, self.path, self.disk_guid)
554 exec_native_cmd(sfdisk_cmd, self.native_sysroot)
555
507 def create(self): 556 def create(self):
508 logger.debug("Creating sparse file %s", self.path) 557 self._make_disk(self.path,
509 with open(self.path, 'w') as sparse: 558 "gpt" if self.ptable_format == "gpt-hybrid" else self.ptable_format,
510 os.ftruncate(sparse.fileno(), self.min_size) 559 self.min_size)
511 560
512 logger.debug("Initializing partition table for %s", self.path) 561 self._write_identifier(self.path, self.identifier)
513 exec_native_cmd("parted -s %s mklabel %s" % 562 self._write_disk_guid()
514 (self.path, self.ptable_format), self.native_sysroot)
515 563
516 logger.debug("Set disk identifier %x", self.identifier) 564 if self.ptable_format == "gpt-hybrid":
517 with open(self.path, 'r+b') as img: 565 mbr_path = self.path + ".mbr"
518 img.seek(0x1B8) 566 self._make_disk(mbr_path, "msdos", self.min_size)
519 img.write(self.identifier.to_bytes(4, 'little')) 567 self._write_identifier(mbr_path, self.identifier)
520 568
521 logger.debug("Creating partitions") 569 logger.debug("Creating partitions")
522 570
571 hybrid_mbr_part_num = 0
572
523 for part in self.partitions: 573 for part in self.partitions:
524 if part.num == 0: 574 if part.num == 0:
525 continue 575 continue
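
When SOURCE_DATE_EPOCH is set, both the MBR disk identifier and the GPT disk GUID are now derived from it, so repeated builds of the same input produce byte-identical partition tables:

    import os
    import random
    import uuid

    def deterministic_disk_ids():
        sde = os.getenv('SOURCE_DATE_EPOCH')
        if sde:
            # A seeded PRNG and a GUID built from the epoch are stable
            # across rebuilds; otherwise fall back to real randomness.
            return (random.Random(int(sde)).randint(1, 0xffffffff),
                    uuid.UUID(int=int(sde)))
        return (random.SystemRandom().randint(1, 0xffffffff), uuid.uuid4())
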
@@ -564,46 +614,77 @@ class PartitionedImage():
564 self._create_partition(self.path, part.type, 614 self._create_partition(self.path, part.type,
565 parted_fs_type, part.start, part.size_sec) 615 parted_fs_type, part.start, part.size_sec)
566 616
567 if part.part_name: 617 if self.ptable_format == "gpt-hybrid" and part.mbr:
568 logger.debug("partition %d: set name to %s", 618 hybrid_mbr_part_num += 1
569 part.num, part.part_name) 619 if hybrid_mbr_part_num > 4:
570 exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ 620 raise WicError("Extended MBR partitions are not supported in hybrid MBR")
571 (part.num, part.part_name, 621 self._create_partition(mbr_path, "primary",
572 self.path), self.native_sysroot) 622 parted_fs_type, part.start, part.size_sec)
573 623
624 if self.ptable_format in ("gpt", "gpt-hybrid") and (part.part_name or part.label):
625 partition_label = part.part_name if part.part_name else part.label
626 logger.debug("partition %d: set name to %s",
627 part.num, partition_label)
628 exec_native_cmd("sfdisk --sector-size %s --part-label %s %d %s" % \
629 (self.sector_size, self.path, part.num,
630 partition_label), self.native_sysroot)
574 if part.part_type: 631 if part.part_type:
575 logger.debug("partition %d: set type UID to %s", 632 logger.debug("partition %d: set type UID to %s",
576 part.num, part.part_type) 633 part.num, part.part_type)
577 exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ 634 exec_native_cmd("sfdisk --sector-size %s --part-type %s %d %s" % \
578 (part.num, part.part_type, 635 (self.sector_size, self.path, part.num,
579 self.path), self.native_sysroot) 636 part.part_type), self.native_sysroot)
580 637
581 if part.uuid and self.ptable_format == "gpt": 638 if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"):
582 logger.debug("partition %d: set UUID to %s", 639 logger.debug("partition %d: set UUID to %s",
583 part.num, part.uuid) 640 part.num, part.uuid)
584 exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ 641 exec_native_cmd("sfdisk --sector-size %s --part-uuid %s %d %s" % \
585 (part.num, part.uuid, self.path), 642 (self.sector_size, self.path, part.num, part.uuid),
586 self.native_sysroot)
587
588 if part.label and self.ptable_format == "gpt":
589 logger.debug("partition %d: set name to %s",
590 part.num, part.label)
591 exec_native_cmd("parted -s %s name %d %s" % \
592 (self.path, part.num, part.label),
593 self.native_sysroot) 643 self.native_sysroot)
594 644
595 if part.active: 645 if part.active:
596 flag_name = "legacy_boot" if self.ptable_format == 'gpt' else "boot" 646 flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot"
597 logger.debug("Set '%s' flag for partition '%s' on disk '%s'", 647 logger.debug("Set '%s' flag for partition '%s' on disk '%s'",
598 flag_name, part.num, self.path) 648 flag_name, part.num, self.path)
599 exec_native_cmd("parted -s %s set %d %s on" % \ 649 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
600 (self.path, part.num, flag_name), 650 (self.sector_size, self.path, part.num, flag_name),
601 self.native_sysroot) 651 self.native_sysroot)
652 if self.ptable_format == 'gpt-hybrid' and part.mbr:
653 exec_native_cmd("export PARTED_SECTOR_SIZE=%d; parted -s %s set %d %s on" % \
654 (self.sector_size, mbr_path, hybrid_mbr_part_num, "boot"),
655 self.native_sysroot)
602 if part.system_id: 656 if part.system_id:
603 exec_native_cmd("sfdisk --part-type %s %s %s" % \ 657 exec_native_cmd("sfdisk --sector-size %s --part-type %s %s %s" % \
604 (self.path, part.num, part.system_id), 658 (self.sector_size, self.path, part.num, part.system_id),
605 self.native_sysroot) 659 self.native_sysroot)
606 660
661 if part.hidden and self.ptable_format == "gpt":
662 logger.debug("Set hidden attribute for partition '%s' on disk '%s'",
663 part.num, self.path)
664 exec_native_cmd("sfdisk --sector-size %s --part-attrs %s %s RequiredPartition" % \
665 (self.sector_size, self.path, part.num),
666 self.native_sysroot)
667
668 if self.ptable_format == "gpt-hybrid":
669 # Write a protective GPT partition
670 hybrid_mbr_part_num += 1
671 if hybrid_mbr_part_num > 4:
672 raise WicError("Extended MBR partitions are not supported in hybrid MBR")
673
674 # parted cannot directly create a protective GPT partition, so
675 # create with an arbitrary type, then change it to the correct type
676 # with sfdisk
677 self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD)
678 exec_native_cmd("sfdisk --sector-size %s --part-type %s %d 0xee" % \
679 (self.sector_size, mbr_path, hybrid_mbr_part_num),
680 self.native_sysroot)
681
682 # Copy hybrid MBR
683 with open(mbr_path, "rb") as mbr_file:
684 with open(self.path, "r+b") as image_file:
685 mbr = mbr_file.read(512)
686 image_file.write(mbr)
687
607 def cleanup(self): 688 def cleanup(self):
608 pass 689 pass
609 690
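
For gpt-hybrid layouts the MBR is assembled in a sidecar file and its first sector is then spliced over the protective MBR of the GPT image; in isolation:

    def splice_hybrid_mbr(mbr_path, image_path, sector_size=512):
        # Overwrite only the first sector of the GPT image with the
        # hand-built hybrid MBR; the GPT structures that follow it
        # remain intact.
        with open(mbr_path, "rb") as mbr_file:
            mbr = mbr_file.read(sector_size)
        with open(image_path, "r+b") as image_file:
            image_file.write(mbr)
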
diff --git a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py
index 5bd7390680..4279ddded8 100644
--- a/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
+++ b/scripts/lib/wic/plugins/source/bootimg_biosplusefi.py
@@ -13,7 +13,7 @@
13# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 13# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
14# 14#
15# DESCRIPTION 15# DESCRIPTION
16# This implements the 'bootimg-biosplusefi' source plugin class for 'wic' 16# This implements the 'bootimg_biosplusefi' source plugin class for 'wic'
17# 17#
18# AUTHORS 18# AUTHORS
19# William Bourque <wbourque [at) gmail.com> 19# William Bourque <wbourque [at) gmail.com>
@@ -34,7 +34,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
34 34
35 Note it is possible to create an image that can boot from both 35 Note it is possible to create an image that can boot from both
 36 legacy BIOS and EFI by defining two partitions: one with arg 36 legacy BIOS and EFI by defining two partitions: one with arg
37 --source bootimg-efi and another one with --source bootimg-pcbios. 37 --source bootimg_efi and another one with --source bootimg_pcbios.
38 However, this method has the obvious downside that it requires TWO 38 However, this method has the obvious downside that it requires TWO
39 partitions to be created on the storage device. 39 partitions to be created on the storage device.
40 Both partitions will also be marked as "bootable" which does not work on 40 Both partitions will also be marked as "bootable" which does not work on
@@ -45,7 +45,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
45 the first partition will be duplicated into the second, even though it 45 the first partition will be duplicated into the second, even though it
46 will not be used at all. 46 will not be used at all.
47 47
48 Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin 48 Also, unlike "isoimage_isohybrid" that also does BIOS and EFI, this plugin
 49 allows you to have more than a single rootfs partition and does 49 allows you to have more than a single rootfs partition and does
50 not turn the rootfs into an initramfs RAM image. 50 not turn the rootfs into an initramfs RAM image.
51 51
@@ -53,32 +53,32 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
53 does not have the limitations listed above. 53 does not have the limitations listed above.
54 54
 55 The plugin is made so it tries not to reimplement what's already 55 The plugin is made so it tries not to reimplement what's already
56 been done in other plugins; as such it imports "bootimg-pcbios" 56 been done in other plugins; as such it imports "bootimg_pcbios"
57 and "bootimg-efi". 57 and "bootimg_efi".
58 Plugin "bootimg-pcbios" is used to generate legacy BIOS boot. 58 Plugin "bootimg_pcbios" is used to generate legacy BIOS boot.
59 Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it 59 Plugin "bootimg_efi" is used to generate the UEFI boot. Note that it
60 requires a --sourceparams argument to know which loader to use; refer 60 requires a --sourceparams argument to know which loader to use; refer
 61 to "bootimg-efi" code/documentation for the list of loaders. 61 to "bootimg_efi" code/documentation for the list of loaders.
62 62
63 Imports are handled with "SourceFileLoader" from importlib as it is 63 Imports are handled with "SourceFileLoader" from importlib as it is
 64 otherwise very difficult to import modules that have a hyphen "-" in their 64 otherwise very difficult to import modules that have a hyphen "-" in their
 65 filenames. 65 filenames.
66 The SourcePlugin() methods used in the plugins (do_install_disk, 66 The SourcePlugin() methods used in the plugins (do_install_disk,
67 do_configure_partition, do_prepare_partition) are then called on both, 67 do_configure_partition, do_prepare_partition) are then called on both,
68 beginning by "bootimg-efi". 68 beginning by "bootimg_efi".
69 69
70 Plugin options, such as "--sourceparams" can still be passed to a 70 Plugin options, such as "--sourceparams" can still be passed to a
 71 plugin, as long as they do not cause issues in the other plugin. 71 plugin, as long as they do not cause issues in the other plugin.
72 72
73 Example wic configuration: 73 Example wic configuration:
74 part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\ 74 part /boot --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\\
75 --ondisk sda --label os_boot --active --align 1024 --use-uuid 75 --ondisk sda --label os_boot --active --align 1024 --use-uuid
76 """ 76 """
77 77
78 name = 'bootimg-biosplusefi' 78 name = 'bootimg_biosplusefi'
79 79
80 __PCBIOS_MODULE_NAME = "bootimg-pcbios" 80 __PCBIOS_MODULE_NAME = "bootimg_pcbios"
81 __EFI_MODULE_NAME = "bootimg-efi" 81 __EFI_MODULE_NAME = "bootimg_efi"
82 82
83 __imgEFIObj = None 83 __imgEFIObj = None
84 __imgBiosObj = None 84 __imgBiosObj = None
@@ -100,7 +100,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
100 100
101 """ 101 """
102 102
103 # Import bootimg-pcbios (class name "BootimgPcbiosPlugin") 103 # Import bootimg_pcbios (class name "BootimgPcbiosPlugin")
104 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), 104 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
105 cls.__PCBIOS_MODULE_NAME + ".py") 105 cls.__PCBIOS_MODULE_NAME + ".py")
106 loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath) 106 loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath)
@@ -108,7 +108,7 @@ class BootimgBiosPlusEFIPlugin(SourcePlugin):
108 loader.exec_module(mod) 108 loader.exec_module(mod)
109 cls.__imgBiosObj = mod.BootimgPcbiosPlugin() 109 cls.__imgBiosObj = mod.BootimgPcbiosPlugin()
110 110
111 # Import bootimg-efi (class name "BootimgEFIPlugin") 111 # Import bootimg_efi (class name "BootimgEFIPlugin")
112 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)), 112 modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
113 cls.__EFI_MODULE_NAME + ".py") 113 cls.__EFI_MODULE_NAME + ".py")
114 loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath) 114 loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath)
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg_efi.py
index 0391aebdc8..cf16705a28 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg_efi.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-efi' source plugin class for 'wic' 7# This implements the 'bootimg_efi' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com> 10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
@@ -32,7 +32,29 @@ class BootimgEFIPlugin(SourcePlugin):
32 This plugin supports GRUB 2 and systemd-boot bootloaders. 32 This plugin supports GRUB 2 and systemd-boot bootloaders.
33 """ 33 """
34 34
35 name = 'bootimg-efi' 35 name = 'bootimg_efi'
36
37 @classmethod
38 def _copy_additional_files(cls, hdddir, initrd, dtb):
39 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
40 if not bootimg_dir:
41 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
42
43 if initrd:
44 initrds = initrd.split(';')
45 for rd in initrds:
46 cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, rd, hdddir)
47 out = exec_cmd(cp_cmd, True)
48 logger.debug("initrd files:\n%s" % (out))
49 else:
50 logger.debug("Ignoring missing initrd")
51
52 if dtb:
53 if ';' in dtb:
54 raise WicError("Only one DTB supported, exiting")
55 cp_cmd = "cp -v -p %s/%s %s" % (bootimg_dir, dtb, hdddir)
56 out = exec_cmd(cp_cmd, True)
57 logger.debug("dtb files:\n%s" % (out))
36 58
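
The new helper consolidates copy logic that previously appeared twice in
this file. A sketch of its semantics with an illustrative deploy layout:
every initrd in a ';'-separated list is copied, but at most one DTB is
accepted:

    def plan_copies(initrd, dtb, deploy_dir="deploy", hdddir="hdd/boot"):
        # Return the (src, dst) pairs _copy_additional_files would copy.
        copies = []
        if initrd:
            for rd in initrd.split(';'):      # "initrd-a;initrd-b" -> both
                copies.append(("%s/%s" % (deploy_dir, rd), hdddir))
        if dtb:
            if ';' in dtb:                    # mirrors the WicError above
                raise ValueError("Only one DTB supported")
            copies.append(("%s/%s" % (deploy_dir, dtb), hdddir))
        return copies

    # plan_copies("initrd-a;initrd-b", "board.dtb") -> three copy operations
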
37 @classmethod 59 @classmethod
38 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): 60 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params):
@@ -53,18 +75,9 @@ class BootimgEFIPlugin(SourcePlugin):
53 "get it from %s." % configfile) 75 "get it from %s." % configfile)
54 76
55 initrd = source_params.get('initrd') 77 initrd = source_params.get('initrd')
78 dtb = source_params.get('dtb')
56 79
57 if initrd: 80 cls._copy_additional_files(hdddir, initrd, dtb)
58 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
59 if not bootimg_dir:
60 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
61
62 initrds = initrd.split(';')
63 for rd in initrds:
64 cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
65 exec_cmd(cp_cmd, True)
66 else:
67 logger.debug("Ignoring missing initrd")
68 81
69 if not custom_cfg: 82 if not custom_cfg:
70 # Create grub configuration using parameters from wks file 83 # Create grub configuration using parameters from wks file
@@ -98,6 +111,9 @@ class BootimgEFIPlugin(SourcePlugin):
98 grubefi_conf += " /%s" % rd 111 grubefi_conf += " /%s" % rd
99 grubefi_conf += "\n" 112 grubefi_conf += "\n"
100 113
114 if dtb:
115 grubefi_conf += "devicetree /%s\n" % dtb
116
101 grubefi_conf += "}\n" 117 grubefi_conf += "}\n"
102 118
103 logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg", 119 logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg",
@@ -109,8 +125,16 @@ class BootimgEFIPlugin(SourcePlugin):
109 @classmethod 125 @classmethod
110 def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): 126 def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params):
111 """ 127 """
112 Create loader-specific systemd-boot/gummiboot config 128 Create loader-specific systemd-boot/gummiboot config. Unified Kernel Image (uki)
129 support is handled in the image recipe via uki.bbclass; only the systemd-boot loader config
130 and the ESP partition structure are created here.
113 """ 131 """
132 # detect uki.bbclass usage
133 image_classes = get_bitbake_var("IMAGE_CLASSES").split()
134 unified_image = False
135 if "uki" in image_classes:
136 unified_image = True
137
114 install_cmd = "install -d %s/loader" % hdddir 138 install_cmd = "install -d %s/loader" % hdddir
115 exec_cmd(install_cmd) 139 exec_cmd(install_cmd)
116 140
@@ -118,35 +142,26 @@ class BootimgEFIPlugin(SourcePlugin):
118 exec_cmd(install_cmd) 142 exec_cmd(install_cmd)
119 143
120 bootloader = creator.ks.bootloader 144 bootloader = creator.ks.bootloader
121
122 loader_conf = "" 145 loader_conf = ""
123 if source_params.get('create-unified-kernel-image') != "true":
124 loader_conf += "default boot\n"
125 loader_conf += "timeout %d\n" % bootloader.timeout
126 146
127 initrd = source_params.get('initrd') 147 # 5 seconds is a sensible default timeout
128 148 loader_conf += "timeout %d\n" % (bootloader.timeout or 5)
129 if initrd and source_params.get('create-unified-kernel-image') != "true":
130 # obviously we need to have a common common deploy var
131 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
132 if not bootimg_dir:
133 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
134
135 initrds = initrd.split(';')
136 for rd in initrds:
137 cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
138 exec_cmd(cp_cmd, True)
139 else:
140 logger.debug("Ignoring missing initrd")
141 149
142 logger.debug("Writing systemd-boot config " 150 logger.debug("Writing systemd-boot config "
143 "%s/hdd/boot/loader/loader.conf", cr_workdir) 151 "%s/hdd/boot/loader/loader.conf", cr_workdir)
144 cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") 152 cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w")
145 cfg.write(loader_conf) 153 cfg.write(loader_conf)
154 logger.debug("loader.conf:\n%s" % (loader_conf))
146 cfg.close() 155 cfg.close()
147 156
157 initrd = source_params.get('initrd')
158 dtb = source_params.get('dtb')
159 if not unified_image:
160 cls._copy_additional_files(hdddir, initrd, dtb)
161
148 configfile = creator.ks.bootloader.configfile 162 configfile = creator.ks.bootloader.configfile
149 custom_cfg = None 163 custom_cfg = None
164 boot_conf = ""
150 if configfile: 165 if configfile:
151 custom_cfg = get_custom_config(configfile) 166 custom_cfg = get_custom_config(configfile)
152 if custom_cfg: 167 if custom_cfg:
@@ -157,8 +172,7 @@ class BootimgEFIPlugin(SourcePlugin):
157 else: 172 else:
158 raise WicError("configfile is specified but failed to " 173 raise WicError("configfile is specified but failed to "
159 "get it from %s.", configfile) 174 "get it from %s.", configfile)
160 175 else:
161 if not custom_cfg:
162 # Create systemd-boot configuration using parameters from wks file 176 # Create systemd-boot configuration using parameters from wks file
163 kernel = get_bitbake_var("KERNEL_IMAGETYPE") 177 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
164 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1": 178 if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
@@ -168,7 +182,6 @@ class BootimgEFIPlugin(SourcePlugin):
168 182
169 title = source_params.get('title') 183 title = source_params.get('title')
170 184
171 boot_conf = ""
172 boot_conf += "title %s\n" % (title if title else "boot") 185 boot_conf += "title %s\n" % (title if title else "boot")
173 boot_conf += "linux /%s\n" % kernel 186 boot_conf += "linux /%s\n" % kernel
174 187
@@ -185,11 +198,15 @@ class BootimgEFIPlugin(SourcePlugin):
185 for rd in initrds: 198 for rd in initrds:
186 boot_conf += "initrd /%s\n" % rd 199 boot_conf += "initrd /%s\n" % rd
187 200
188 if source_params.get('create-unified-kernel-image') != "true": 201 if dtb:
202 boot_conf += "devicetree /%s\n" % dtb
203
204 if not unified_image:
189 logger.debug("Writing systemd-boot config " 205 logger.debug("Writing systemd-boot config "
190 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) 206 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
191 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") 207 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w")
192 cfg.write(boot_conf) 208 cfg.write(boot_conf)
209 logger.debug("boot.conf:\n%s" % (boot_conf))
193 cfg.close() 210 cfg.close()
194 211
195 212
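
Putting the pieces together, the non-UKI systemd-boot path writes a boot
entry along these lines; the values in this sketch are made up, but the
string is assembled the same way the code above assembles boot_conf:

    kernel, initrds, dtb, title = "bzImage", ["initrd"], "board.dtb", None

    boot_conf = "title %s\n" % (title if title else "boot")
    boot_conf += "linux /%s\n" % kernel
    for rd in initrds:
        boot_conf += "initrd /%s\n" % rd
    if dtb:
        boot_conf += "devicetree /%s\n" % dtb

    # title boot
    # linux /bzImage
    # initrd /initrd
    # devicetree /board.dtb
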
@@ -210,10 +227,12 @@ class BootimgEFIPlugin(SourcePlugin):
210 cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params) 227 cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params)
211 elif source_params['loader'] == 'systemd-boot': 228 elif source_params['loader'] == 'systemd-boot':
212 cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params) 229 cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params)
230 elif source_params['loader'] == 'uefi-kernel':
231 pass
213 else: 232 else:
214 raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) 233 raise WicError("unrecognized bootimg_efi loader: %s" % source_params['loader'])
215 except KeyError: 234 except KeyError:
216 raise WicError("bootimg-efi requires a loader, none specified") 235 raise WicError("bootimg_efi requires a loader, none specified")
217 236
218 if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None: 237 if get_bitbake_var("IMAGE_EFI_BOOT_FILES") is None:
219 logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES') 238 logger.debug('No boot files defined in IMAGE_EFI_BOOT_FILES')
@@ -233,7 +252,7 @@ class BootimgEFIPlugin(SourcePlugin):
233 252
234 # list of tuples (src_name, dst_name) 253 # list of tuples (src_name, dst_name)
235 deploy_files = [] 254 deploy_files = []
236 for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): 255 for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
237 if ';' in src_entry: 256 if ';' in src_entry:
238 dst_entry = tuple(src_entry.split(';')) 257 dst_entry = tuple(src_entry.split(';'))
239 if not dst_entry[0] or not dst_entry[1]: 258 if not dst_entry[0] or not dst_entry[1]:
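
The widened character class (note the added \+) lets IMAGE_EFI_BOOT_FILES
entries contain '+' in file names. A hedged sketch of how this tokenizer
and the ';' rename convention interact, with invented file names:

    import re

    boot_files = "bzImage;vmlinuz ucode+fw.bin;fw/ucode+fw.bin grubx64.efi"

    deploy_files = []
    for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
        if ';' in src_entry:
            src, dst = src_entry.split(';')   # "src;dst" renames on install
        else:
            src = dst = src_entry
        deploy_files.append((src, dst))

    # [('bzImage', 'vmlinuz'), ('ucode+fw.bin', 'fw/ucode+fw.bin'),
    #  ('grubx64.efi', 'grubx64.efi')]
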
@@ -292,90 +311,83 @@ class BootimgEFIPlugin(SourcePlugin):
292 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) 311 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
293 312
294 if source_params.get('create-unified-kernel-image') == "true": 313 if source_params.get('create-unified-kernel-image') == "true":
295 initrd = source_params.get('initrd') 314 raise WicError("create-unified-kernel-image is no longer supported. Please use uki.bbclass.")
296 if not initrd:
297 raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting")
298
299 deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
300 efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub"))
301 if len(efi_stub) == 0:
302 raise WicError("Unified Kernel Image EFI stub not found, exiting")
303 efi_stub = efi_stub[0]
304
305 with tempfile.TemporaryDirectory() as tmp_dir:
306 label = source_params.get('label')
307 label_conf = "root=%s" % creator.rootdev
308 if label:
309 label_conf = "LABEL=%s" % label
310
311 bootloader = creator.ks.bootloader
312 cmdline = open("%s/cmdline" % tmp_dir, "w")
313 cmdline.write("%s %s" % (label_conf, bootloader.append))
314 cmdline.close()
315 315
316 initrds = initrd.split(';') 316 if source_params.get('install-kernel-into-boot-dir') != 'false':
317 initrd = open("%s/initrd" % tmp_dir, "wb") 317 install_cmd = "install -v -p -m 0644 %s/%s %s/%s" % \
318 for f in initrds:
319 with open("%s/%s" % (deploy_dir, f), 'rb') as in_file:
320 shutil.copyfileobj(in_file, initrd)
321 initrd.close()
322
323 # Searched by systemd-boot:
324 # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images
325 install_cmd = "install -d %s/EFI/Linux" % hdddir
326 exec_cmd(install_cmd)
327
328 staging_dir_host = get_bitbake_var("STAGING_DIR_HOST")
329
330 # https://www.freedesktop.org/software/systemd/man/systemd-stub.html
331 objcopy_cmd = "objcopy \
332 --add-section .osrel=%s --change-section-vma .osrel=0x20000 \
333 --add-section .cmdline=%s --change-section-vma .cmdline=0x30000 \
334 --add-section .linux=%s --change-section-vma .linux=0x2000000 \
335 --add-section .initrd=%s --change-section-vma .initrd=0x3000000 \
336 %s %s" % \
337 ("%s/usr/lib/os-release" % staging_dir_host,
338 cmdline.name,
339 "%s/%s" % (staging_kernel_dir, kernel),
340 initrd.name,
341 efi_stub,
342 "%s/EFI/Linux/linux.efi" % hdddir)
343 exec_cmd(objcopy_cmd)
344 else:
345 install_cmd = "install -m 0644 %s/%s %s/%s" % \
346 (staging_kernel_dir, kernel, hdddir, kernel) 318 (staging_kernel_dir, kernel, hdddir, kernel)
347 exec_cmd(install_cmd) 319 out = exec_cmd(install_cmd)
320 logger.debug("Installed kernel files:\n%s" % out)
348 321
349 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): 322 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"):
350 for src_path, dst_path in cls.install_task: 323 for src_path, dst_path in cls.install_task:
351 install_cmd = "install -m 0644 -D %s %s" \ 324 install_cmd = "install -v -p -m 0644 -D %s %s" \
352 % (os.path.join(kernel_dir, src_path), 325 % (os.path.join(kernel_dir, src_path),
353 os.path.join(hdddir, dst_path)) 326 os.path.join(hdddir, dst_path))
354 exec_cmd(install_cmd) 327 out = exec_cmd(install_cmd)
328 logger.debug("Installed IMAGE_EFI_BOOT_FILES:\n%s" % out)
355 329
356 try: 330 try:
357 if source_params['loader'] == 'grub-efi': 331 if source_params['loader'] == 'grub-efi':
358 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, 332 shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir,
359 "%s/grub.cfg" % cr_workdir) 333 "%s/grub.cfg" % cr_workdir)
360 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: 334 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]:
361 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) 335 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:])
362 exec_cmd(cp_cmd, True) 336 exec_cmd(cp_cmd, True)
363 shutil.move("%s/grub.cfg" % cr_workdir, 337 shutil.move("%s/grub.cfg" % cr_workdir,
364 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) 338 "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir)
365 elif source_params['loader'] == 'systemd-boot': 339 elif source_params['loader'] == 'systemd-boot':
366 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: 340 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]:
367 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) 341 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:])
368 exec_cmd(cp_cmd, True) 342 out = exec_cmd(cp_cmd, True)
343 logger.debug("systemd-boot files:\n%s" % out)
344 elif source_params['loader'] == 'uefi-kernel':
345 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
346 if not kernel:
347 raise WicError("Empty KERNEL_IMAGETYPE")
348 target = get_bitbake_var("TARGET_SYS")
349 if not target:
350 raise WicError("Empty TARGET_SYS")
351
352 if re.match("x86_64", target):
353 kernel_efi_image = "bootx64.efi"
354 elif re.match('i.86', target):
355 kernel_efi_image = "bootia32.efi"
356 elif re.match('aarch64', target):
357 kernel_efi_image = "bootaa64.efi"
358 elif re.match('arm', target):
359 kernel_efi_image = "bootarm.efi"
360 else:
361 raise WicError("UEFI stub kernel is incompatible with target %s" % target)
362
363 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]:
364 cp_cmd = "cp -v -p %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image)
365 out = exec_cmd(cp_cmd, True)
366 logger.debug("uefi-kernel files:\n%s" % out)
369 else: 367 else:
370 raise WicError("unrecognized bootimg-efi loader: %s" % 368 raise WicError("unrecognized bootimg_efi loader: %s" %
371 source_params['loader']) 369 source_params['loader'])
370
371 # must have installed at least one EFI bootloader
372 out = glob(os.path.join(hdddir, 'EFI', 'BOOT', 'boot*.efi'))
373 logger.debug("Installed EFI loader files:\n%s" % out)
374 if not out:
375 raise WicError("No EFI loaders installed to ESP partition. Check that grub-efi, systemd-boot or similar is installed.")
376
372 except KeyError: 377 except KeyError:
373 raise WicError("bootimg-efi requires a loader, none specified") 378 raise WicError("bootimg_efi requires a loader, none specified")
374 379
375 startup = os.path.join(kernel_dir, "startup.nsh") 380 startup = os.path.join(kernel_dir, "startup.nsh")
376 if os.path.exists(startup): 381 if os.path.exists(startup):
377 cp_cmd = "cp %s %s/" % (startup, hdddir) 382 cp_cmd = "cp -v -p %s %s/" % (startup, hdddir)
378 exec_cmd(cp_cmd, True) 383 out = exec_cmd(cp_cmd, True)
384 logger.debug("startup files:\n%s" % out)
385
386 for paths in part.include_path or []:
387 for path in paths:
388 cp_cmd = "cp -v -p -r %s %s/" % (path, hdddir)
389 out = exec_cmd(cp_cmd, True)
390 logger.debug("include_path files:\n%s" % out)
379 391
380 du_cmd = "du -bks %s" % hdddir 392 du_cmd = "du -bks %s" % hdddir
381 out = exec_cmd(du_cmd) 393 out = exec_cmd(du_cmd)
@@ -391,17 +403,26 @@ class BootimgEFIPlugin(SourcePlugin):
391 logger.debug("Added %d extra blocks to %s to get to %d total blocks", 403 logger.debug("Added %d extra blocks to %s to get to %d total blocks",
392 extra_blocks, part.mountpoint, blocks) 404 extra_blocks, part.mountpoint, blocks)
393 405
406 # required for compatibility with certain devices expecting file system
407 # block count to be equal to partition block count
408 if blocks < part.fixed_size:
409 blocks = part.fixed_size
410 logger.debug("Overriding %s to %d total blocks for compatibility",
411 part.mountpoint, blocks)
412
394 # dosfs image, created by mkdosfs 413 # dosfs image, created by mkdosfs
395 bootimg = "%s/boot.img" % cr_workdir 414 bootimg = "%s/boot.img" % cr_workdir
396 415
397 label = part.label if part.label else "ESP" 416 label = part.label if part.label else "ESP"
398 417
399 dosfs_cmd = "mkdosfs -n %s -i %s -C %s %d" % \ 418 dosfs_cmd = "mkdosfs -v -n %s -i %s -C %s %d" % \
400 (label, part.fsuuid, bootimg, blocks) 419 (label, part.fsuuid, bootimg, blocks)
401 exec_native_cmd(dosfs_cmd, native_sysroot) 420 out = exec_native_cmd(dosfs_cmd, native_sysroot)
421 logger.debug("mkdosfs:\n%s" % (str(out)))
402 422
403 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) 423 mcopy_cmd = "mcopy -v -p -i %s -s %s/* ::/" % (bootimg, hdddir)
404 exec_native_cmd(mcopy_cmd, native_sysroot) 424 out = exec_native_cmd(mcopy_cmd, native_sysroot)
425 logger.debug("mcopy:\n%s" % (str(out)))
405 426
406 chmod_cmd = "chmod 644 %s" % bootimg 427 chmod_cmd = "chmod 644 %s" % bootimg
407 exec_cmd(chmod_cmd) 428 exec_cmd(chmod_cmd)
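
The sizing logic above reduces to: take du's usage in 1k blocks, add the
partition's extra space, then force the count up to --fixed-size so that
devices expecting the filesystem block count to equal the partition block
count are satisfied. A condensed sketch with illustrative numbers:

    def esp_blocks(used_blocks, extra_blocks, fixed_size_blocks=0):
        # Block count eventually handed to mkdosfs -C.
        blocks = used_blocks + extra_blocks
        if blocks < fixed_size_blocks:
            blocks = fixed_size_blocks    # compatibility override
        return blocks

    # esp_blocks(9000, 10240) == 19240
    # esp_blocks(9000, 10240, fixed_size_blocks=32768) == 32768
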
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg_partition.py
index 5dbe2558d2..cc121a78f0 100644
--- a/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/scripts/lib/wic/plugins/source/bootimg_partition.py
@@ -1,8 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# DESCRIPTION 6# DESCRIPTION
5# This implements the 'bootimg-partition' source plugin class for 7# This implements the 'bootimg_partition' source plugin class for
6# 'wic'. The plugin creates an image of boot partition, copying over 8# 'wic'. The plugin creates an image of boot partition, copying over
7# files listed in IMAGE_BOOT_FILES bitbake variable. 9# files listed in IMAGE_BOOT_FILES bitbake variable.
8# 10#
@@ -14,7 +16,7 @@ import logging
14import os 16import os
15import re 17import re
16 18
17from glob import glob 19from oe.bootfiles import get_boot_files
18 20
19from wic import WicError 21from wic import WicError
20from wic.engine import get_custom_config 22from wic.engine import get_custom_config
@@ -29,7 +31,8 @@ class BootimgPartitionPlugin(SourcePlugin):
29 listed in IMAGE_BOOT_FILES bitbake variable. 31 listed in IMAGE_BOOT_FILES bitbake variable.
30 """ 32 """
31 33
32 name = 'bootimg-partition' 34 name = 'bootimg_partition'
35 image_boot_files_var_name = 'IMAGE_BOOT_FILES'
33 36
34 @classmethod 37 @classmethod
35 def do_configure_partition(cls, part, source_params, cr, cr_workdir, 38 def do_configure_partition(cls, part, source_params, cr, cr_workdir,
@@ -54,51 +57,16 @@ class BootimgPartitionPlugin(SourcePlugin):
54 else: 57 else:
55 var = "" 58 var = ""
56 59
57 boot_files = get_bitbake_var("IMAGE_BOOT_FILES" + var) 60 boot_files = get_bitbake_var(cls.image_boot_files_var_name + var)
58 if boot_files is not None: 61 if boot_files is not None:
59 break 62 break
60 63
61 if boot_files is None: 64 if boot_files is None:
62 raise WicError('No boot files defined, IMAGE_BOOT_FILES unset for entry #%d' % part.lineno) 65 raise WicError('No boot files defined, %s unset for entry #%d' % (cls.image_boot_files_var_name, part.lineno))
63 66
64 logger.debug('Boot files: %s', boot_files) 67 logger.debug('Boot files: %s', boot_files)
65 68
66 # list of tuples (src_name, dst_name) 69 cls.install_task = get_boot_files(kernel_dir, boot_files)
67 deploy_files = []
68 for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
69 if ';' in src_entry:
70 dst_entry = tuple(src_entry.split(';'))
71 if not dst_entry[0] or not dst_entry[1]:
72 raise WicError('Malformed boot file entry: %s' % src_entry)
73 else:
74 dst_entry = (src_entry, src_entry)
75
76 logger.debug('Destination entry: %r', dst_entry)
77 deploy_files.append(dst_entry)
78
79 cls.install_task = [];
80 for deploy_entry in deploy_files:
81 src, dst = deploy_entry
82 if '*' in src:
83 # by default install files under their basename
84 entry_name_fn = os.path.basename
85 if dst != src:
86 # unless a target name was given, then treat name
87 # as a directory and append a basename
88 entry_name_fn = lambda name: \
89 os.path.join(dst,
90 os.path.basename(name))
91
92 srcs = glob(os.path.join(kernel_dir, src))
93
94 logger.debug('Globbed sources: %s', ', '.join(srcs))
95 for entry in srcs:
96 src = os.path.relpath(entry, kernel_dir)
97 entry_dst_name = entry_name_fn(entry)
98 cls.install_task.append((src, entry_dst_name))
99 else:
100 cls.install_task.append((src, dst))
101
102 if source_params.get('loader') != "u-boot": 70 if source_params.get('loader') != "u-boot":
103 return 71 return
104 72
@@ -110,7 +78,7 @@ class BootimgPartitionPlugin(SourcePlugin):
110 # Use a custom configuration for extlinux.conf 78 # Use a custom configuration for extlinux.conf
111 extlinux_conf = custom_cfg 79 extlinux_conf = custom_cfg
112 logger.debug("Using custom configuration file " 80 logger.debug("Using custom configuration file "
113 "%s for extlinux.cfg", configfile) 81 "%s for extlinux.conf", configfile)
114 else: 82 else:
115 raise WicError("configfile is specified but failed to " 83 raise WicError("configfile is specified but failed to "
116 "get it from %s." % configfile) 84 "get it from %s." % configfile)
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg_pcbios.py
index 32e47f1831..21f41e00bb 100644
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/scripts/lib/wic/plugins/source/bootimg_pcbios.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6# DESCRIPTION 6# DESCRIPTION
7# This implements the 'bootimg-pcbios' source plugin class for 'wic' 7# This implements the 'bootimg_pcbios' source plugin class for 'wic'
8# 8#
9# AUTHORS 9# AUTHORS
10# Tom Zanussi <tom.zanussi (at] linux.intel.com> 10# Tom Zanussi <tom.zanussi (at] linux.intel.com>
@@ -27,7 +27,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
27 Create MBR boot partition and install syslinux on it. 27 Create MBR boot partition and install syslinux on it.
28 """ 28 """
29 29
30 name = 'bootimg-pcbios' 30 name = 'bootimg_pcbios'
31 31
32 @classmethod 32 @classmethod
33 def _get_bootimg_dir(cls, bootimg_dir, dirname): 33 def _get_bootimg_dir(cls, bootimg_dir, dirname):
@@ -122,7 +122,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
122 syslinux_conf += "DEFAULT boot\n" 122 syslinux_conf += "DEFAULT boot\n"
123 syslinux_conf += "LABEL boot\n" 123 syslinux_conf += "LABEL boot\n"
124 124
125 kernel = "/vmlinuz" 125 kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE")
126 syslinux_conf += "KERNEL " + kernel + "\n" 126 syslinux_conf += "KERNEL " + kernel + "\n"
127 127
128 syslinux_conf += "APPEND label=boot root=%s %s\n" % \ 128 syslinux_conf += "APPEND label=boot root=%s %s\n" % \
@@ -155,8 +155,8 @@ class BootimgPcbiosPlugin(SourcePlugin):
155 kernel = "%s-%s.bin" % \ 155 kernel = "%s-%s.bin" % \
156 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) 156 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
157 157
158 cmds = ("install -m 0644 %s/%s %s/vmlinuz" % 158 cmds = ("install -m 0644 %s/%s %s/%s" %
159 (staging_kernel_dir, kernel, hdddir), 159 (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")),
160 "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % 160 "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" %
161 (bootimg_dir, hdddir), 161 (bootimg_dir, hdddir),
162 "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % 162 "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" %
diff --git a/scripts/lib/wic/plugins/source/empty.py b/scripts/lib/wic/plugins/source/empty.py
index 041617d648..4178912377 100644
--- a/scripts/lib/wic/plugins/source/empty.py
+++ b/scripts/lib/wic/plugins/source/empty.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,9 +9,19 @@
7# To use it you must pass "empty" as argument for the "--source" parameter in 9# To use it you must pass "empty" as argument for the "--source" parameter in
8# the wks file. For example: 10# the wks file. For example:
9# part foo --source empty --ondisk sda --size="1024" --align 1024 11# part foo --source empty --ondisk sda --size="1024" --align 1024
12#
13# The plugin supports writing zeros to the start of the
14# partition. This is useful to overwrite old content like
15# filesystem signatures which may be re-recognized otherwise.
16# This feature can be enabled with
17# '--sourceparams="[fill|size=<N>[S|s|K|k|M|G]][,][bs=<N>[S|s|K|k|M|G]]"'
18# Conflicting or missing options throw errors.
10 19
11import logging 20import logging
21import os
12 22
23from wic import WicError
24from wic.ksparser import sizetype
13from wic.pluginbase import SourcePlugin 25from wic.pluginbase import SourcePlugin
14 26
15logger = logging.getLogger('wic') 27logger = logging.getLogger('wic')
@@ -17,6 +29,16 @@ logger = logging.getLogger('wic')
17class EmptyPartitionPlugin(SourcePlugin): 29class EmptyPartitionPlugin(SourcePlugin):
18 """ 30 """
19 Populate unformatted empty partition. 31 Populate unformatted empty partition.
32
33 The following sourceparams are supported:
34 - fill
35 Fill the entire partition with zeros. Requires '--fixed-size' option
36 to be set.
37 - size=<N>[S|s|K|k|M|G]
38 Set the first N bytes of the partition to zero. Default unit is 'K'.
39 - bs=<N>[S|s|K|k|M|G]
40 Write at most N bytes at a time during source file creation.
41 Defaults to '1M'. Default unit is 'K'.
20 """ 42 """
21 43
22 name = 'empty' 44 name = 'empty'
@@ -29,4 +51,39 @@ class EmptyPartitionPlugin(SourcePlugin):
29 Called to do the actual content population for a partition i.e. it 51 Called to do the actual content population for a partition i.e. it
30 'prepares' the partition to be incorporated into the image. 52 'prepares' the partition to be incorporated into the image.
31 """ 53 """
32 return 54 get_byte_count = sizetype('K', True)
55 size = 0
56
57 if 'fill' in source_params and 'size' in source_params:
58 raise WicError("Conflicting source parameters 'fill' and 'size' specified, exiting.")
59
60 # Set the size of the zeros to be written to the partition
61 if 'fill' in source_params:
62 if part.fixed_size == 0:
63 raise WicError("Source parameter 'fill' only works with the '--fixed-size' option, exiting.")
64 size = get_byte_count(part.fixed_size)
65 elif 'size' in source_params:
66 size = get_byte_count(source_params['size'])
67
68 if size == 0:
69 # Nothing to do, create empty partition
70 return
71
72 if 'bs' in source_params:
73 bs = get_byte_count(source_params['bs'])
74 else:
75 bs = get_byte_count('1M')
76
77 # Create a binary file of the requested size filled with zeros
78 source_file = os.path.join(cr_workdir, 'empty-plugin-zeros%s.bin' % part.lineno)
79 if not os.path.exists(os.path.dirname(source_file)):
80 os.makedirs(os.path.dirname(source_file))
81
82 quotient, remainder = divmod(size, bs)
83 with open(source_file, 'wb') as file:
84 for _ in range(quotient):
85 file.write(bytearray(bs))
86 file.write(bytearray(remainder))
87
88 part.size = (size + 1024 - 1) // 1024 # size in KB rounded up
89 part.source_file = source_file
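
Usage illustration: '--sourceparams="size=4M,bs=1M"' zeroes the first
4 MiB of the partition in 1 MiB chunks, while '--sourceparams="fill"'
zeroes all of a '--fixed-size' partition. The suffixes are resolved by
wic's sizetype parser; the unit factors in this sketch are illustrative
assumptions, not a copy of that parser:

    def byte_count(value, default_unit='K'):
        # Parse "N[S|s|K|k|M|G]" into bytes (assumed unit factors).
        units = {'S': 512, 's': 512, 'K': 1024, 'k': 1024,
                 'M': 1024 ** 2, 'G': 1024 ** 3}
        value = str(value)
        if value and value[-1] in units:
            return int(value[:-1]) * units[value[-1]]
        return int(value) * units[default_unit]

    # byte_count('4M') == 4194304, byte_count('16') == 16384
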
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py
index afc9ea0f8f..5d42eb5d3e 100644
--- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/scripts/lib/wic/plugins/source/isoimage_isohybrid.py
@@ -1,8 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# DESCRIPTION 6# DESCRIPTION
5# This implements the 'isoimage-isohybrid' source plugin class for 'wic' 7# This implements the 'isoimage_isohybrid' source plugin class for 'wic'
6# 8#
7# AUTHORS 9# AUTHORS
8# Mihaly Varga <mihaly.varga (at] ni.com> 10# Mihaly Varga <mihaly.varga (at] ni.com>
@@ -33,7 +35,7 @@ class IsoImagePlugin(SourcePlugin):
33 bootloader files. 35 bootloader files.
34 36
35 Example kickstart file: 37 Example kickstart file:
36 part /boot --source isoimage-isohybrid --sourceparams="loader=grub-efi, \\ 38 part /boot --source isoimage_isohybrid --sourceparams="loader=grub-efi, \\
37 image_name= IsoImage" --ondisk cd --label LIVECD 39 image_name= IsoImage" --ondisk cd --label LIVECD
38 bootloader --timeout=10 --append=" " 40 bootloader --timeout=10 --append=" "
39 41
@@ -43,7 +45,7 @@ class IsoImagePlugin(SourcePlugin):
43 extension added by direct imager plugin) and a file named IsoImage-cd.iso 45 extension added by direct imager plugin) and a file named IsoImage-cd.iso
44 """ 46 """
45 47
46 name = 'isoimage-isohybrid' 48 name = 'isoimage_isohybrid'
47 49
48 @classmethod 50 @classmethod
49 def do_configure_syslinux(cls, creator, cr_workdir): 51 def do_configure_syslinux(cls, creator, cr_workdir):
@@ -338,10 +340,10 @@ class IsoImagePlugin(SourcePlugin):
338 cls.do_configure_grubefi(part, creator, target_dir) 340 cls.do_configure_grubefi(part, creator, target_dir)
339 341
340 else: 342 else:
341 raise WicError("unrecognized bootimg-efi loader: %s" % 343 raise WicError("unrecognized bootimg_efi loader: %s" %
342 source_params['loader']) 344 source_params['loader'])
343 except KeyError: 345 except KeyError:
344 raise WicError("bootimg-efi requires a loader, none specified") 346 raise WicError("bootimg_efi requires a loader, none specified")
345 347
346 # Create efi.img that contains bootloader files for EFI booting 348 # Create efi.img that contains bootloader files for EFI booting
347 # if ISODIR didn't exist or didn't contain it 349 # if ISODIR didn't exist or didn't contain it
diff --git a/scripts/lib/wic/plugins/source/rawcopy.py b/scripts/lib/wic/plugins/source/rawcopy.py
index fa7b1eb8ac..21903c2f23 100644
--- a/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/scripts/lib/wic/plugins/source/rawcopy.py
@@ -1,9 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import logging 7import logging
6import os 8import os
9import signal
10import subprocess
7 11
8from wic import WicError 12from wic import WicError
9from wic.pluginbase import SourcePlugin 13from wic.pluginbase import SourcePlugin
@@ -21,6 +25,10 @@ class RawCopyPlugin(SourcePlugin):
21 25
22 @staticmethod 26 @staticmethod
23 def do_image_label(fstype, dst, label): 27 def do_image_label(fstype, dst, label):
28 # don't create label when fstype is none
29 if fstype == 'none':
30 return
31
24 if fstype.startswith('ext'): 32 if fstype.startswith('ext'):
25 cmd = 'tune2fs -L %s %s' % (label, dst) 33 cmd = 'tune2fs -L %s %s' % (label, dst)
26 elif fstype in ('msdos', 'vfat'): 34 elif fstype in ('msdos', 'vfat'):
@@ -38,6 +46,26 @@ class RawCopyPlugin(SourcePlugin):
38 46
39 exec_cmd(cmd) 47 exec_cmd(cmd)
40 48
49 @staticmethod
50 def do_image_uncompression(src, dst, workdir):
51 def subprocess_setup():
52 # Python installs a SIGPIPE handler by default. This is usually not what
53 # non-Python subprocesses expect.
54 # SIGPIPE errors are known issues with gzip/bash
55 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
56
57 extension = os.path.splitext(src)[1]
58 decompressor = {
59 ".bz2": "bzip2",
60 ".gz": "gzip",
61 ".xz": "xz",
62 ".zst": "zstd -f",
63 }.get(extension)
64 if not decompressor:
65 raise WicError("Not supported compressor filename extension: %s" % extension)
66 cmd = "%s -dc %s > %s" % (decompressor, src, dst)
67 subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=workdir)
68
41 @classmethod 69 @classmethod
42 def do_prepare_partition(cls, part, source_params, cr, cr_workdir, 70 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
43 oe_builddir, bootimg_dir, kernel_dir, 71 oe_builddir, bootimg_dir, kernel_dir,
@@ -56,7 +84,13 @@ class RawCopyPlugin(SourcePlugin):
56 if 'file' not in source_params: 84 if 'file' not in source_params:
57 raise WicError("No file specified") 85 raise WicError("No file specified")
58 86
59 src = os.path.join(kernel_dir, source_params['file']) 87 if 'unpack' in source_params:
88 img = os.path.join(kernel_dir, source_params['file'])
89 src = os.path.join(cr_workdir, os.path.splitext(source_params['file'])[0])
90 RawCopyPlugin.do_image_uncompression(img, src, cr_workdir)
91 else:
92 src = os.path.join(kernel_dir, source_params['file'])
93
60 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno)) 94 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno))
61 95
62 if not os.path.exists(os.path.dirname(dst)): 96 if not os.path.exists(os.path.dirname(dst)):
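
Usage illustration for the new 'unpack' sourceparam: with
--sourceparams="file=core-image.ext4.gz,unpack" the image is decompressed
into the work directory before being copied in. The extension mapping can
be exercised on its own; this sketch mirrors the table above but is not
the plugin API:

    import os

    DECOMPRESSORS = {".bz2": "bzip2", ".gz": "gzip",
                     ".xz": "xz", ".zst": "zstd -f"}

    def uncompress_cmd(src, dst):
        tool = DECOMPRESSORS.get(os.path.splitext(src)[1])
        if not tool:
            raise ValueError("Unsupported compressor for %s" % src)
        return "%s -dc %s > %s" % (tool, src, dst)

    # uncompress_cmd("core-image.ext4.gz", "core-image.ext4")
    #   -> "gzip -dc core-image.ext4.gz > core-image.ext4"
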
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
index 2e34e715ca..06fce06bb1 100644
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ b/scripts/lib/wic/plugins/source/rootfs.py
@@ -35,22 +35,22 @@ class RootfsPlugin(SourcePlugin):
35 @staticmethod 35 @staticmethod
36 def __validate_path(cmd, rootfs_dir, path): 36 def __validate_path(cmd, rootfs_dir, path):
37 if os.path.isabs(path): 37 if os.path.isabs(path):
38 logger.error("%s: Must be relative: %s" % (cmd, orig_path)) 38 logger.error("%s: Must be relative: %s" % (cmd, path))
39 sys.exit(1) 39 sys.exit(1)
40 40
41 # Disallow climbing outside of parent directory using '..', 41 # Disallow climbing outside of parent directory using '..',
42 # because doing so could be quite disastrous (we will delete the 42 # because doing so could be quite disastrous (we will delete the
43 # directory, or modify a directory outside OpenEmbedded). 43 # directory, or modify a directory outside OpenEmbedded).
44 full_path = os.path.realpath(os.path.join(rootfs_dir, path)) 44 full_path = os.path.abspath(os.path.join(rootfs_dir, path))
45 if not full_path.startswith(os.path.realpath(rootfs_dir)): 45 if not full_path.startswith(os.path.realpath(rootfs_dir)):
46 logger.error("%s: Must point inside the rootfs:" % (cmd, path)) 46 logger.error("%s: Must point inside the rootfs: %s" % (cmd, path))
47 sys.exit(1) 47 sys.exit(1)
48 48
49 return full_path 49 return full_path
50 50
51 @staticmethod 51 @staticmethod
52 def __get_rootfs_dir(rootfs_dir): 52 def __get_rootfs_dir(rootfs_dir):
53 if os.path.isdir(rootfs_dir): 53 if rootfs_dir and os.path.isdir(rootfs_dir):
54 return os.path.realpath(rootfs_dir) 54 return os.path.realpath(rootfs_dir)
55 55
56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) 56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir)
@@ -97,6 +97,9 @@ class RootfsPlugin(SourcePlugin):
97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab")) 97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab"))
98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo") 98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo")
99 if not os.path.lexists(pseudo_dir): 99 if not os.path.lexists(pseudo_dir):
100 pseudo_dir = os.path.join(cls.__get_rootfs_dir(None), '../pseudo')
101
102 if not os.path.lexists(pseudo_dir):
100 logger.warn("%s folder does not exist. " 103 logger.warn("%s folder does not exist. "
101 "Usernames and permissions will be invalid " % pseudo_dir) 104 "Usernames and permissions will be invalid " % pseudo_dir)
102 pseudo_dir = None 105 pseudo_dir = None
@@ -221,7 +224,7 @@ class RootfsPlugin(SourcePlugin):
221 if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update: 224 if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update:
222 fstab_path = os.path.join(new_rootfs, "etc/fstab") 225 fstab_path = os.path.join(new_rootfs, "etc/fstab")
223 # Assume that fstab should always be owned by root with fixed permissions 226 # Assume that fstab should always be owned by root with fixed permissions
224 install_cmd = "install -m 0644 %s %s" % (part.updated_fstab_path, fstab_path) 227 install_cmd = "install -m 0644 -p %s %s" % (part.updated_fstab_path, fstab_path)
225 if new_pseudo: 228 if new_pseudo:
226 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo) 229 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
227 else: 230 else:
diff --git a/scripts/lz4c b/scripts/lz4c
new file mode 100755
index 0000000000..466fc349e0
--- /dev/null
+++ b/scripts/lz4c
@@ -0,0 +1,26 @@
1#!/usr/bin/env bash
2
3# Wrapper to intercept legacy lz4c arguments and convert to lz4.
4args=()
5while [ $# -ne 0 ]; do
6 case ${1} in
7 -c0)
8 args+=(-0)
9 ;;
10 -c1)
11 args+=(-9)
12 ;;
13 -c2|-hc)
14 args+=(-12)
15 ;;
16 -y)
17 args+=(--force)
18 ;;
19 *)
20 args+=("${1}")
21 ;;
22 esac
23 shift
24done
25
26exec lz4 "${args[@]}"
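
In practice this means a legacy invocation such as 'lz4c -hc -y in out' is
transparently rewritten to 'lz4 -12 --force in out', while any argument the
wrapper does not recognize is passed through to lz4 unchanged.
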
diff --git a/scripts/makefile-getvar b/scripts/makefile-getvar
new file mode 100755
index 0000000000..4a07055e68
--- /dev/null
+++ b/scripts/makefile-getvar
@@ -0,0 +1,24 @@
1#! /bin/sh
2
3# Get a variable's value from a makefile:
4#
5# $ makefile-getvar Makefile VARIABLE VARIABLE ...
6#
7# If multiple variables are specified, they will be printed one per line.
8#
9# SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
10# SPDX-License-Identifier: GPL-2.0-only
11
12set -eu
13
14MAKEFILE=$1
15shift
16
17for VARIABLE in $*; do
18 make -f - $VARIABLE.var <<EOF
19include $MAKEFILE
20
21%.var:
22 @echo \$(\$*)
23EOF
24done
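
The here-document hands make a throwaway pattern rule: for each requested
VARIABLE the script includes the real Makefile and asks make to "build"
VARIABLE.var, whose recipe simply echoes $(VARIABLE). So
'makefile-getvar Makefile VERSION PATCHLEVEL' prints both values, one per
line, with make's full expansion semantics applied.
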
diff --git a/scripts/nativesdk-intercept/chgrp b/scripts/nativesdk-intercept/chgrp
index 30cc417d3a..f8ae84b8b3 100755
--- a/scripts/nativesdk-intercept/chgrp
+++ b/scripts/nativesdk-intercept/chgrp
@@ -14,7 +14,10 @@ real_chgrp = shutil.which('chgrp', path=path)
14args = list() 14args = list()
15 15
16found = False 16found = False
17for i in sys.argv: 17
18args.append(real_chgrp)
19
20for i in sys.argv[1:]:
18 if i.startswith("-"): 21 if i.startswith("-"):
19 args.append(i) 22 args.append(i)
20 continue 23 continue
diff --git a/scripts/nativesdk-intercept/chown b/scripts/nativesdk-intercept/chown
index 3914b3e384..0805ceb70a 100755
--- a/scripts/nativesdk-intercept/chown
+++ b/scripts/nativesdk-intercept/chown
@@ -14,7 +14,10 @@ real_chown = shutil.which('chown', path=path)
14args = list() 14args = list()
15 15
16found = False 16found = False
17for i in sys.argv: 17
18args.append(real_chown)
19
20for i in sys.argv[1:]:
18 if i.startswith("-"): 21 if i.startswith("-"):
19 args.append(i) 22 args.append(i)
20 continue 23 continue
diff --git a/scripts/oe-build-perf-report b/scripts/oe-build-perf-report
index 7812ea4540..a36f3c1bca 100755
--- a/scripts/oe-build-perf-report
+++ b/scripts/oe-build-perf-report
@@ -336,8 +336,16 @@ def print_html_report(data, id_comp, buildstats):
336 test_i = test_data['tests'][test] 336 test_i = test_data['tests'][test]
337 meas_i = test_i['measurements'][meas] 337 meas_i = test_i['measurements'][meas]
338 commit_num = get_data_item(meta, 'layers.meta.commit_count') 338 commit_num = get_data_item(meta, 'layers.meta.commit_count')
339 samples.append(measurement_stats(meas_i)) 339 commit = get_data_item(meta, 'layers.meta.commit')
340 # Add start_time for both test measurement types, sysres and disk usage
341 try:
342 # Use the commit_time if available, falling back to start_time
343 start_time = get_data_item(meta, 'layers.meta.commit_time')
344 except KeyError:
345 start_time = test_i['start_time'][0]
346 samples.append(measurement_stats(meas_i, '', start_time))
340 samples[-1]['commit_num'] = commit_num 347 samples[-1]['commit_num'] = commit_num
348 samples[-1]['commit'] = commit
341 349
342 absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) 350 absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean'])
343 reldiff = absdiff * 100 / samples[id_comp]['mean'] 351 reldiff = absdiff * 100 / samples[id_comp]['mean']
@@ -473,7 +481,7 @@ Examine build performance test results from a Git repository"""
473 group.add_argument('--branch', '-B', default='master', help="Branch to find commit in") 481 group.add_argument('--branch', '-B', default='master', help="Branch to find commit in")
474 group.add_argument('--branch2', help="Branch to find comparison revisions in") 482 group.add_argument('--branch2', help="Branch to find comparison revisions in")
475 group.add_argument('--machine', default='qemux86') 483 group.add_argument('--machine', default='qemux86')
476 group.add_argument('--history-length', default=25, type=int, 484 group.add_argument('--history-length', default=300, type=int,
477 help="Number of tested revisions to plot in html report") 485 help="Number of tested revisions to plot in html report")
478 group.add_argument('--commit', 486 group.add_argument('--commit',
479 help="Revision to search for") 487 help="Revision to search for")
diff --git a/scripts/oe-buildenv-internal b/scripts/oe-buildenv-internal
index e0d920f2fc..2fdb19565a 100755
--- a/scripts/oe-buildenv-internal
+++ b/scripts/oe-buildenv-internal
@@ -32,12 +32,12 @@ fi
32# We potentially have code that doesn't parse correctly with older versions 32# We potentially have code that doesn't parse correctly with older versions
33# of Python, and rather than fixing that and being eternally vigilant for 33# of Python, and rather than fixing that and being eternally vigilant for
34# any other new feature use, just check the version here. 34# any other new feature use, just check the version here.
35py_v35_check=$(python3 -c 'import sys; print(sys.version_info >= (3,5,0))') 35py_v38_check=$(python3 -c 'import sys; print(sys.version_info >= (3,8,0))')
36if [ "$py_v35_check" != "True" ]; then 36if [ "$py_v38_check" != "True" ]; then
37 echo >&2 "BitBake requires Python 3.5.0 or later as 'python3 (scripts/install-buildtools can be used if needed)'" 37 echo >&2 "BitBake requires Python 3.8.0 or later as 'python3' (scripts/install-buildtools can be used if needed)"
38 return 1 38 return 1
39fi 39fi
40unset py_v35_check 40unset py_v38_check
41 41
42if [ -z "$BDIR" ]; then 42if [ -z "$BDIR" ]; then
43 if [ -z "$1" ]; then 43 if [ -z "$1" ]; then
@@ -92,27 +92,28 @@ fi
92PYTHONPATH=$BITBAKEDIR/lib:$PYTHONPATH 92PYTHONPATH=$BITBAKEDIR/lib:$PYTHONPATH
93export PYTHONPATH 93export PYTHONPATH
94 94
95# Remove any paths added by sourcing this script before
96[ -n "$OE_ADDED_PATHS" ] && PATH=$(echo $PATH | sed -e "s#$OE_ADDED_PATHS##") ||
97 PATH=$(echo $PATH | sed -e "s#$OEROOT/scripts:$BITBAKEDIR/bin:##")
98
95# Make sure our paths are at the beginning of $PATH 99# Make sure our paths are at the beginning of $PATH
96for newpath in "$BITBAKEDIR/bin" "$OEROOT/scripts"; do 100OE_ADDED_PATHS="$OEROOT/scripts:$BITBAKEDIR/bin:"
97 # Remove any existences of $newpath from $PATH 101PATH="$OE_ADDED_PATHS$PATH"
98 PATH=$(echo $PATH | sed -re "s#(^|:)$newpath(:|$)#\2#g;s#^:##") 102export OE_ADDED_PATHS
99 103
100 # Add $newpath to $PATH 104# This is not needed anymore
101 PATH="$newpath:$PATH" 105unset BITBAKEDIR
102done
103unset BITBAKEDIR newpath
104 106
105# Used by the runqemu script 107# Used by the runqemu script
106export BUILDDIR 108export BUILDDIR
107export PATH
108 109
109BB_ENV_EXTRAWHITE_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \ 110BB_ENV_PASSTHROUGH_ADDITIONS_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \
110HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \ 111HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \
111all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \ 112all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \
112SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \ 113SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \
113SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \ 114SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \
114BB_LOGCONFIG" 115BB_LOGCONFIG"
115 116
116BB_ENV_EXTRAWHITE="$(echo $BB_ENV_EXTRAWHITE $BB_ENV_EXTRAWHITE_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')" 117BB_ENV_PASSTHROUGH_ADDITIONS="$(echo $BB_ENV_PASSTHROUGH_ADDITIONS $BB_ENV_PASSTHROUGH_ADDITIONS_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')"
117 118
118export BB_ENV_EXTRAWHITE 119export BB_ENV_PASSTHROUGH_ADDITIONS
diff --git a/scripts/oe-check-sstate b/scripts/oe-check-sstate
index 59bcb32a8c..0d171c4463 100755
--- a/scripts/oe-check-sstate
+++ b/scripts/oe-check-sstate
@@ -18,7 +18,6 @@ import re
18scripts_path = os.path.dirname(os.path.realpath(__file__)) 18scripts_path = os.path.dirname(os.path.realpath(__file__))
19lib_path = scripts_path + '/lib' 19lib_path = scripts_path + '/lib'
20sys.path = sys.path + [lib_path] 20sys.path = sys.path + [lib_path]
21import scriptutils
22import scriptpath 21import scriptpath
23scriptpath.add_bitbake_lib_path() 22scriptpath.add_bitbake_lib_path()
24import argparse_oe 23import argparse_oe
@@ -47,17 +46,14 @@ def check(args):
47 try: 46 try:
48 env = os.environ.copy() 47 env = os.environ.copy()
49 if not args.same_tmpdir: 48 if not args.same_tmpdir:
50 env['BB_ENV_EXTRAWHITE'] = env.get('BB_ENV_EXTRAWHITE', '') + ' TMPDIR:forcevariable' 49 env['BB_ENV_PASSTHROUGH_ADDITIONS'] = env.get('BB_ENV_PASSTHROUGH_ADDITIONS', '') + ' TMPDIR:forcevariable'
51 env['TMPDIR:forcevariable'] = tmpdir 50 env['TMPDIR:forcevariable'] = tmpdir
52 51
53 try: 52 try:
54 output = subprocess.check_output( 53 cmd = ['bitbake', '--dry-run', '--runall=build'] + args.target
55 'bitbake -n %s' % ' '.join(args.target), 54 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, env=env)
56 stderr=subprocess.STDOUT,
57 env=env,
58 shell=True)
59 55
60 task_re = re.compile('NOTE: Running setscene task [0-9]+ of [0-9]+ \(([^)]+)\)') 56 task_re = re.compile(r'NOTE: Running setscene task [0-9]+ of [0-9]+ \(([^)]+)\)')
61 tasks = [] 57 tasks = []
62 for line in output.decode('utf-8').splitlines(): 58 for line in output.decode('utf-8').splitlines():
63 res = task_re.match(line) 59 res = task_re.match(line)
diff --git a/scripts/oe-debuginfod b/scripts/oe-debuginfod
index 9e5482d869..5e70d37b8b 100755
--- a/scripts/oe-debuginfod
+++ b/scripts/oe-debuginfod
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: MIT 5# SPDX-License-Identifier: MIT
4# 6#
5 7
@@ -13,14 +15,29 @@ scriptpath.add_bitbake_lib_path()
13 15
14import bb.tinfoil 16import bb.tinfoil
15import subprocess 17import subprocess
18import argparse
16 19
17if __name__ == "__main__": 20if __name__ == "__main__":
21 p = argparse.ArgumentParser()
22 p.add_argument("-d", action='store_true', \
23 help="store debuginfod files in project sub-directory")
24
25 args = p.parse_args()
26
18 with bb.tinfoil.Tinfoil() as tinfoil: 27 with bb.tinfoil.Tinfoil() as tinfoil:
19 tinfoil.prepare(config_only=True) 28 tinfoil.prepare(config_only=True)
20 package_classes_var = "DEPLOY_DIR_" + tinfoil.config_data.getVar("PACKAGE_CLASSES").split()[0].replace("package_", "").upper() 29 package_classes_var = "DEPLOY_DIR_" + tinfoil.config_data.getVar("PACKAGE_CLASSES").split()[0].replace("package_", "").upper()
21 feed_dir = tinfoil.config_data.getVar(package_classes_var, expand=True) 30 feed_dir = tinfoil.config_data.getVar(package_classes_var, expand=True)
22 31
32 opts = [ '--verbose', '-R', '-U', feed_dir ]
33
34 if args.d:
35 fdir = os.path.join(os.getcwd(), 'oedid-files')
36 os.makedirs(fdir, exist_ok=True)
37 opts += [ '-d', os.path.join(fdir, 'did.sqlite') ]
38
23 subprocess.call(['bitbake', '-c', 'addto_recipe_sysroot', 'elfutils-native']) 39 subprocess.call(['bitbake', '-c', 'addto_recipe_sysroot', 'elfutils-native'])
24 40
25 subprocess.call(['oe-run-native', 'elfutils-native', 'debuginfod', '--verbose', '-R', '-U', feed_dir]) 41 subprocess.call(['oe-run-native', 'elfutils-native', 'debuginfod'] + opts)
42 # we should not get here
26 print("\nTo use the debuginfod server please ensure that this variable PACKAGECONFIG:pn-elfutils-native = \"debuginfod libdebuginfod\" is set in the local.conf") 43 print("\nTo use the debuginfod server please ensure that this variable PACKAGECONFIG:pn-elfutils-native = \"debuginfod libdebuginfod\" is set in the local.conf")
diff --git a/scripts/oe-depends-dot b/scripts/oe-depends-dot
index 5eb3e12769..d02ee455f6 100755
--- a/scripts/oe-depends-dot
+++ b/scripts/oe-depends-dot
@@ -14,8 +14,8 @@ import re
14class Dot(object): 14class Dot(object):
15 def __init__(self): 15 def __init__(self):
16 parser = argparse.ArgumentParser( 16 parser = argparse.ArgumentParser(
17 description="Analyse recipe-depends.dot generated by bitbake -g", 17 description="Analyse task-depends.dot generated by bitbake -g",
18 epilog="Use %(prog)s --help to get help") 18 formatter_class=argparse.RawDescriptionHelpFormatter)
19 parser.add_argument("dotfile", 19 parser.add_argument("dotfile",
20 help = "Specify the dotfile", nargs = 1, action='store', default='') 20 help = "Specify the dotfile", nargs = 1, action='store', default='')
21 parser.add_argument("-k", "--key", 21 parser.add_argument("-k", "--key",
@@ -32,6 +32,21 @@ class Dot(object):
32 " For example, A->B, B->C, A->C, then A->C can be removed.", 32 " For example, A->B, B->C, A->C, then A->C can be removed.",
33 action="store_true", default=False) 33 action="store_true", default=False)
34 34
35 parser.epilog = """
36Examples:
37First generate the .dot file:
38 bitbake -g core-image-minimal
39
40To find out why a package is being built:
41 %(prog)s -k <package> -w ./task-depends.dot
42
43To find out what a package depends on:
44 %(prog)s -k <package> -d ./task-depends.dot
45
46Reduce the .dot file to packages only, no tasks:
47 %(prog)s -r ./task-depends.dot
48"""
49
35 self.args = parser.parse_args() 50 self.args = parser.parse_args()
36 51
37 if len(sys.argv) != 3 and len(sys.argv) < 5: 52 if len(sys.argv) != 3 and len(sys.argv) < 5:
@@ -99,6 +114,10 @@ class Dot(object):
99 if key == "meta-world-pkgdata": 114 if key == "meta-world-pkgdata":
100 continue 115 continue
101 dep = m.group(2) 116 dep = m.group(2)
117 key = key.split('.')[0]
118 dep = dep.split('.')[0]
119 if key == dep:
120 continue
102 if key in depends: 121 if key in depends:
103 if not key in depends[key]: 122 if not key in depends[key]:
104 depends[key].add(dep) 123 depends[key].add(dep)
@@ -140,9 +159,14 @@ class Dot(object):
140 159
141 reverse_deps = [] 160 reverse_deps = []
142 if self.args.why: 161 if self.args.why:
143 for k, v in depends.items(): 162 key_list = [self.args.key]
144 if self.args.key in v and not k in reverse_deps: 163 current_key = self.args.key
145 reverse_deps.append(k) 164 while (len(key_list) != 0):
165 current_key = key_list.pop()
166 for k, v in depends.items():
167 if current_key in v and not k in reverse_deps:
168 reverse_deps.append(k)
169 key_list.append(k)
146 print('Because: %s' % ' '.join(reverse_deps)) 170 print('Because: %s' % ' '.join(reverse_deps))
147 Dot.print_dep_chains(self.args.key, reverse_deps, depends) 171 Dot.print_dep_chains(self.args.key, reverse_deps, depends)
148 172
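
The reworked '--why' now computes the transitive closure of reverse
dependencies with a worklist rather than a single pass. A standalone
sketch of the same traversal, where depends maps each node to the set of
nodes it depends on:

    def reverse_closure(depends, key):
        # Every node that directly or transitively depends on key.
        reverse_deps = []
        key_list = [key]
        while key_list:
            current = key_list.pop()
            for node, deps in depends.items():
                if current in deps and node not in reverse_deps:
                    reverse_deps.append(node)
                    key_list.append(node)
        return reverse_deps

    # reverse_closure({'img': {'pkg'}, 'pkg': {'lib'}}, 'lib')
    #   == ['pkg', 'img']
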
diff --git a/scripts/oe-find-native-sysroot b/scripts/oe-find-native-sysroot
index 5146bbf999..6228efcbee 100755
--- a/scripts/oe-find-native-sysroot
+++ b/scripts/oe-find-native-sysroot
@@ -36,20 +36,9 @@ if [ "$1" = '--help' -o "$1" = '-h' -o $# -ne 1 ] ; then
36fi 36fi
37 37
38# Global vars 38# Global vars
39BITBAKE_E=""
40set_oe_native_sysroot(){ 39set_oe_native_sysroot(){
41 echo "Running bitbake -e $1" 40 echo "Getting sysroot..."
42 BITBAKE_E="`bitbake -e $1`" 41 OECORE_NATIVE_SYSROOT=$(bitbake-getvar -r $1 --value STAGING_DIR_NATIVE)
43 OECORE_NATIVE_SYSROOT=`echo "$BITBAKE_E" | grep ^STAGING_DIR_NATIVE= | cut -d '"' -f2`
44
45 if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
46 # This indicates that there was an error running bitbake -e that
47 # the user needs to be informed of
48 echo "There was an error running bitbake to determine STAGING_DIR_NATIVE"
49 echo "Here is the output from bitbake -e $1"
50 echo $BITBAKE_E
51 exit 1
52 fi
53} 42}
54 43
55if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then 44if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
diff --git a/scripts/oe-gnome-terminal-phonehome b/scripts/oe-gnome-terminal-phonehome
index b6b9a3867b..1352a9872b 100755
--- a/scripts/oe-gnome-terminal-phonehome
+++ b/scripts/oe-gnome-terminal-phonehome
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Gnome terminal won't tell us which PID a given command is run as 7# Gnome terminal won't tell us which PID a given command is run as
diff --git a/scripts/oe-pkgdata-browser b/scripts/oe-pkgdata-browser
index a3a381923b..c152c82b25 100755
--- a/scripts/oe-pkgdata-browser
+++ b/scripts/oe-pkgdata-browser
@@ -1,4 +1,9 @@
1#! /usr/bin/env python3 1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
2 7
3import os, sys, enum, ast 8import os, sys, enum, ast
4 9
diff --git a/scripts/oe-pkgdata-util b/scripts/oe-pkgdata-util
index 71656dadce..44ae40549a 100755
--- a/scripts/oe-pkgdata-util
+++ b/scripts/oe-pkgdata-util
@@ -296,7 +296,7 @@ def package_info(args):
296 extra = '' 296 extra = ''
297 for line in f: 297 for line in f:
298 for var in vars: 298 for var in vars:
299 m = re.match(var + '(?:_\S+)?:\s*(.+?)\s*$', line) 299 m = re.match(var + r'(?::\S+)?:\s*(.+?)\s*$', line)
300 if m: 300 if m:
301 vals[var] = m.group(1) 301 vals[var] = m.group(1)
302 pkg_version = vals['PKGV'] or '' 302 pkg_version = vals['PKGV'] or ''
diff --git a/scripts/oe-pylint b/scripts/oe-pylint
index 7cc1ccb010..5ad72838e9 100755
--- a/scripts/oe-pylint
+++ b/scripts/oe-pylint
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Run the pylint3 against our common python module spaces and print a report of potential issues 7# Run the pylint3 against our common python module spaces and print a report of potential issues
diff --git a/scripts/oe-selftest b/scripts/oe-selftest
index 18ac0f5869..afc48d9905 100755
--- a/scripts/oe-selftest
+++ b/scripts/oe-selftest
@@ -18,8 +18,6 @@
18 18
19import os 19import os
20import sys 20import sys
21import argparse
22import logging
23 21
24scripts_path = os.path.dirname(os.path.realpath(__file__)) 22scripts_path = os.path.dirname(os.path.realpath(__file__))
25lib_path = scripts_path + '/lib' 23lib_path = scripts_path + '/lib'
diff --git a/scripts/oe-setup-build b/scripts/oe-setup-build
new file mode 100755
index 0000000000..49603d9fd1
--- /dev/null
+++ b/scripts/oe-setup-build
@@ -0,0 +1,129 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import argparse
9import json
10import os
11import subprocess
12
13def defaultlayers():
14 return os.path.abspath(os.path.join(os.path.dirname(__file__), '.oe-layers.json'))
15
16def makebuildpath(topdir, template):
17 return os.path.join(topdir, "build-{}".format(template))
18
19def discover_templates(layers_file):
20 if not os.path.exists(layers_file):
21 raise Exception("List of layers {} does not exist; were the layers set up using the setup-layers script or bitbake-setup tool?".format(layers_file))
22
23 templates = []
24 layers_list = json.load(open(layers_file))["layers"]
25 for layer in layers_list:
26 template_dir = os.path.join(os.path.dirname(layers_file), layer, 'conf','templates')
27 if os.path.exists(template_dir):
28 for d in sorted(os.listdir(template_dir)):
29 templatepath = os.path.join(template_dir,d)
30 if not os.path.isfile(os.path.join(templatepath,'local.conf.sample')):
31 continue
32 layer_base = os.path.basename(layer)
33 templatename = "{}-{}".format(layer_base[5:] if layer_base.startswith("meta-") else layer_base, d)
34 buildpath = makebuildpath(os.getcwd(), templatename)
35 notespath = os.path.join(template_dir, d, 'conf-notes.txt')
36                try: notes = open(notespath).read()
37                except OSError: notes = None
38                try: summary = open(os.path.join(template_dir, d, 'conf-summary.txt')).read()
39                except OSError: summary = None
40 templates.append({"templatename":templatename,"templatepath":templatepath,"buildpath":buildpath,"notespath":notespath,"notes":notes,"summary":summary})
41
42 return templates
43
44def print_templates(templates, verbose):
45 print("Available build configurations:\n")
46
47 for i in range(len(templates)):
48 t = templates[i]
49 print("{}. {}".format(i+1, t["templatename"]))
50 print("{}".format(t["summary"].strip() if t["summary"] else "This configuration does not have a summary."))
51 if verbose:
52 print("Configuration template path:", t["templatepath"])
53 print("Build path:", t["buildpath"])
54 print("Usage notes:", t["notespath"] if t["notes"] else "This configuration does not have usage notes.")
55 print("")
56 if not verbose:
57 print("Re-run with 'list -v' to see additional information.")
58
59def list_templates(args):
60 templates = discover_templates(args.layerlist)
61 if not templates:
62 return
63
64 verbose = args.v
65 print_templates(templates, verbose)
66
67def find_template(template_name, templates):
68 print_templates(templates, False)
69 if not template_name:
70 n_s = input("Please choose a configuration by its number: ")
71 try: return templates[int(n_s) - 1]
72        except (ValueError, IndexError):
73 print("Invalid selection, please try again.")
74 return None
75 else:
76 for t in templates:
77 if t["templatename"] == template_name:
78 return t
79 raise Exception("Configuration {} is not one of {}, please try again.".format(template_name, [t["templatename"] for t in templates]))
80
81def setup_build_env(args):
82 templates = discover_templates(args.layerlist)
83 if not templates:
84 return
85
86 template = find_template(args.c, templates)
87 if not template:
88 return
89 builddir = args.b if args.b else template["buildpath"]
90 no_shell = args.no_shell
91 coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
92 cmd_base = ". {} {}".format(os.path.join(coredir, 'oe-init-build-env'), os.path.abspath(builddir))
93
94 initbuild = os.path.join(builddir, 'init-build-env')
95 if not os.path.exists(initbuild):
96 os.makedirs(builddir, exist_ok=True)
97 with open(initbuild, 'w') as f:
98 f.write(cmd_base)
99        print("\nRun '. {}' to initialize the build environment in your current shell session.\n".format(initbuild))
100
101 cmd = "TEMPLATECONF={} {}".format(template["templatepath"], cmd_base)
102 if not no_shell:
103 cmd = cmd + " && {}".format(os.environ.get('SHELL','bash'))
104 print("Running:", cmd)
105 subprocess.run(cmd, shell=True, executable=os.environ.get('SHELL','bash'))
106
107parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.")
108parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers()))
109
110subparsers = parser.add_subparsers()
111parser_list_templates = subparsers.add_parser('list', help='List available configurations')
112parser_list_templates.add_argument('-v', action='store_true',
113 help='Print detailed information and usage notes for each available build configuration.')
114parser_list_templates.set_defaults(func=list_templates)
115
116parser_setup_env = subparsers.add_parser('setup', help='Set up a build environment and open a shell session with it, ready to run builds.')
117parser_setup_env.add_argument('-c', metavar='configuration_name', help="Use a build configuration configuration_name to set up a build environment (run this script with 'list' to see what is available)")
118parser_setup_env.add_argument('-b', metavar='build_path', help="Set up a build directory in build_path (run this script with 'list -v' to see where it would be by default)")
119parser_setup_env.add_argument('--no-shell', action='store_true',
120 help='Create a build directory but do not start a shell session with the build environment from it.')
121parser_setup_env.set_defaults(func=setup_build_env)
122
123args = parser.parse_args()
124
125if 'func' in args:
126 args.func(args)
127else:
128 from argparse import Namespace
129 setup_build_env(Namespace(layerlist=args.layerlist, c=None, b=None, no_shell=False))
diff --git a/scripts/oe-setup-builddir b/scripts/oe-setup-builddir
index 5a51fa793f..dcb384c33a 100755
--- a/scripts/oe-setup-builddir
+++ b/scripts/oe-setup-builddir
@@ -7,12 +7,14 @@
7# SPDX-License-Identifier: GPL-2.0-or-later 7# SPDX-License-Identifier: GPL-2.0-or-later
8# 8#
9 9
10if [ -z "$BUILDDIR" ]; then 10die() {
11 echo >&2 "Error: The build directory (BUILDDIR) must be set!" 11 echo Error: "$@" >&2
12 exit 1 12 exit 1
13fi 13}
14
15[ -n "$BUILDDIR" ] || die "The build directory (BUILDDIR) must be set!"
14 16
15if [ "$1" = '--help' -o "$1" = '-h' ]; then 17if [ "$1" = '--help' ] || [ "$1" = '-h' ]; then
16 echo 'Usage: oe-setup-builddir' 18 echo 'Usage: oe-setup-builddir'
17 echo '' 19 echo ''
18 echo "OpenEmbedded setup-builddir - setup build directory $BUILDDIR" 20 echo "OpenEmbedded setup-builddir - setup build directory $BUILDDIR"
@@ -22,33 +24,22 @@ fi
22 24
23mkdir -p "$BUILDDIR/conf" 25mkdir -p "$BUILDDIR/conf"
24 26
25if [ ! -d "$BUILDDIR" ]; then 27[ -d "$BUILDDIR" ] || die "The build directory ($BUILDDIR) does not exist!"
26 echo >&2 "Error: The builddir ($BUILDDIR) does not exist!" 28[ -w "$BUILDDIR" ] ||
27 exit 1 29 die "Cannot write to $BUILDDIR, perhaps try sourcing with a writable path? i.e. . oe-init-build-env ~/my-build"
28fi
29
30if [ ! -w "$BUILDDIR" ]; then
31 echo >&2 "Error: Cannot write to $BUILDDIR, perhaps try sourcing with a writable path? i.e. . oe-init-build-env ~/my-build"
32 exit 1
33fi
34 30
35# Attempting removal of sticky,setuid bits from BUILDDIR, BUILDDIR/conf 31# Attempting removal of sticky,setuid bits from BUILDDIR, BUILDDIR/conf
36chmod -st "$BUILDDIR" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR" 32chmod -st "$BUILDDIR" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR"
37chmod -st "$BUILDDIR/conf" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR/conf" 33chmod -st "$BUILDDIR/conf" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR/conf"
38 34
39cd "$BUILDDIR" 35cd "$BUILDDIR" || die "Failed to change directory to $BUILDDIR!"
40 36
41if [ -f "$BUILDDIR/conf/templateconf.cfg" ]; then 37. "$OEROOT/.templateconf"
42 TEMPLATECONF=$(cat "$BUILDDIR/conf/templateconf.cfg")
43fi
44
45. $OEROOT/.templateconf
46 38
47if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then 39# Keep the original TEMPLATECONF before possibly prefixing it with $OEROOT below.
48 echo "$TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg" 40ORG_TEMPLATECONF=$TEMPLATECONF
49fi
50 41
51# 42#
52# $TEMPLATECONF can point to a directory for the template local.conf & bblayers.conf 43# $TEMPLATECONF can point to a directory for the template local.conf & bblayers.conf
53# 44#
54if [ -n "$TEMPLATECONF" ]; then 45if [ -n "$TEMPLATECONF" ]; then
@@ -57,73 +48,94 @@ if [ -n "$TEMPLATECONF" ]; then
57 if [ -d "$OEROOT/$TEMPLATECONF" ]; then 48 if [ -d "$OEROOT/$TEMPLATECONF" ]; then
58 TEMPLATECONF="$OEROOT/$TEMPLATECONF" 49 TEMPLATECONF="$OEROOT/$TEMPLATECONF"
59 fi 50 fi
60 if [ ! -d "$TEMPLATECONF" ]; then 51 [ -d "$TEMPLATECONF" ] ||
61 echo >&2 "Error: TEMPLATECONF value points to nonexistent directory '$TEMPLATECONF'" 52 die "TEMPLATECONF value points to nonexistent directory '$TEMPLATECONF'"
62 exit 1 53 fi
63 fi 54 templatesdir=$(python3 -c "import sys; print(sys.argv[1].strip('/').split('/')[-2])" "$TEMPLATECONF")
55 if [ "$templatesdir" != templates ] || [ ! -f "$TEMPLATECONF/../../layer.conf" ]; then
56 die "TEMPLATECONF value (which is $TEMPLATECONF) must point to meta-some-layer/conf/templates/template-name"
64 fi 57 fi
65 OECORELAYERCONF="$TEMPLATECONF/bblayers.conf.sample" 58 OECORELAYERCONF="$TEMPLATECONF/bblayers.conf.sample"
66 OECORELOCALCONF="$TEMPLATECONF/local.conf.sample" 59 OECORELOCALCONF="$TEMPLATECONF/local.conf.sample"
60 OECORESUMMARYCONF="$TEMPLATECONF/conf-summary.txt"
67 OECORENOTESCONF="$TEMPLATECONF/conf-notes.txt" 61 OECORENOTESCONF="$TEMPLATECONF/conf-notes.txt"
68fi 62fi
69 63
70unset SHOWYPDOC 64unset SHOWYPDOC
71if [ -z "$OECORELOCALCONF" ]; then 65if [ -z "$OECORELOCALCONF" ]; then
72 OECORELOCALCONF="$OEROOT/meta/conf/local.conf.sample" 66 OECORELOCALCONF="$OEROOT/meta/conf/templates/default/local.conf.sample"
73fi 67fi
74if [ ! -r "$BUILDDIR/conf/local.conf" ]; then 68if [ ! -r "$BUILDDIR/conf/local.conf" ]; then
75 cat <<EOM 69 cat <<EOM
76You had no conf/local.conf file. This configuration file has therefore been 70You had no conf/local.conf file. This configuration file has therefore been
77created for you with some default values. You may wish to edit it to, for 71created for you from $OECORELOCALCONF
78example, select a different MACHINE (target hardware). See conf/local.conf 72You may wish to edit it to, for example, select a different MACHINE (target
79for more information as common configuration options are commented. 73hardware).
80 74
81EOM 75EOM
82 cp -f $OECORELOCALCONF "$BUILDDIR/conf/local.conf" 76 cp -f "$OECORELOCALCONF" "$BUILDDIR/conf/local.conf"
83 SHOWYPDOC=yes 77 SHOWYPDOC=yes
84fi 78fi
85 79
86if [ -z "$OECORELAYERCONF" ]; then 80if [ -z "$OECORELAYERCONF" ]; then
87 OECORELAYERCONF="$OEROOT/meta/conf/bblayers.conf.sample" 81 OECORELAYERCONF="$OEROOT/meta/conf/templates/default/bblayers.conf.sample"
88fi 82fi
89if [ ! -r "$BUILDDIR/conf/bblayers.conf" ]; then 83if [ ! -r "$BUILDDIR/conf/bblayers.conf" ]; then
90 cat <<EOM 84 cat <<EOM
91You had no conf/bblayers.conf file. This configuration file has therefore been 85You had no conf/bblayers.conf file. This configuration file has therefore been
92created for you with some default values. To add additional metadata layers 86created for you from $OECORELAYERCONF
93into your configuration please add entries to conf/bblayers.conf. 87To add additional metadata layers into your configuration please add entries
88to conf/bblayers.conf.
94 89
95EOM 90EOM
96 91
97 # Put the abosolute path to the layers in bblayers.conf so we can run 92 # Put the absolute path to the layers in bblayers.conf so we can run
98 # bitbake without the init script after the first run 93 # bitbake without the init script after the first run.
99 # ##COREBASE## is deprecated as it's meaning was inconsistent, but continue 94 # ##COREBASE## is deprecated as its meaning was inconsistent, but continue
100 # to replace it for compatibility. 95 # to replace it for compatibility.
101 sed -e "s|##OEROOT##|$OEROOT|g" \ 96 sed -e "s|##OEROOT##|$OEROOT|g" \
102 -e "s|##COREBASE##|$OEROOT|g" \ 97 -e "s|##COREBASE##|$OEROOT|g" \
103 $OECORELAYERCONF > "$BUILDDIR/conf/bblayers.conf" 98 "$OECORELAYERCONF" > "$BUILDDIR/conf/bblayers.conf"
104 SHOWYPDOC=yes 99 SHOWYPDOC=yes
105fi 100fi
106 101
102if [ -z "$OECORESUMMARYCONF" ]; then
103 OECORESUMMARYCONF="$OEROOT/meta/conf/templates/default/conf-summary.txt"
104fi
105if [ ! -r "$BUILDDIR/conf/conf-summary.txt" ]; then
106 [ ! -r "$OECORESUMMARYCONF" ] || cp "$OECORESUMMARYCONF" "$BUILDDIR/conf/conf-summary.txt"
107fi
108
109if [ -z "$OECORENOTESCONF" ]; then
110 OECORENOTESCONF="$OEROOT/meta/conf/templates/default/conf-notes.txt"
111fi
112if [ ! -r "$BUILDDIR/conf/conf-notes.txt" ]; then
113 [ ! -r "$OECORENOTESCONF" ] || cp "$OECORENOTESCONF" "$BUILDDIR/conf/conf-notes.txt"
114fi
115
107# Prevent disturbing a new GIT clone in same console 116# Prevent disturbing a new GIT clone in same console
108unset OECORELOCALCONF 117unset OECORELOCALCONF
109unset OECORELAYERCONF 118unset OECORELAYERCONF
119unset OECORESUMMARYCONF
120unset OECORENOTESCONF
110 121
111# Ending the first-time run message. Show the YP Documentation banner. 122# Ending the first-time run message. Show the YP Documentation banner.
112if [ ! -z "$SHOWYPDOC" ]; then 123if [ -n "$SHOWYPDOC" ]; then
113 cat <<EOM 124 cat <<EOM
114The Yocto Project has extensive documentation about OE including a reference 125The Yocto Project has extensive documentation about OE including a reference
115manual which can be found at: 126manual which can be found at:
116 https://docs.yoctoproject.org 127 https://docs.yoctoproject.org
117 128
118For more information about OpenEmbedded see their website: 129For more information about OpenEmbedded see the website:
119 https://www.openembedded.org/ 130 https://www.openembedded.org/
120 131
121EOM 132EOM
122# unset SHOWYPDOC 133# unset SHOWYPDOC
123fi 134fi
124 135
125if [ -z "$OECORENOTESCONF" ]; then 136[ ! -r "$BUILDDIR/conf/conf-summary.txt" ] || cat "$BUILDDIR/conf/conf-summary.txt"
126 OECORENOTESCONF="$OEROOT/meta/conf/conf-notes.txt" 137[ ! -r "$BUILDDIR/conf/conf-notes.txt" ] || cat "$BUILDDIR/conf/conf-notes.txt"
138
139if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then
140 echo "$ORG_TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg"
127fi 141fi
128[ ! -r "$OECORENOTESCONF" ] || cat $OECORENOTESCONF
129unset OECORENOTESCONF
diff --git a/scripts/oe-setup-layers b/scripts/oe-setup-layers
new file mode 100755
index 0000000000..6fbfefd656
--- /dev/null
+++ b/scripts/oe-setup-layers
@@ -0,0 +1,146 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8# This file was copied from poky (or oe-core)/scripts/oe-setup-layers by running
9#
10# bitbake-layers create-layers-setup destdir
11#
12# It is recommended that you do not modify this file directly, but rather re-run the above command to get the freshest upstream copy.
13#
14# This script is idempotent. Subsequent runs only change what is necessary to
15# ensure your layers match your configuration.
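#
# As an illustrative sketch only (the layer name and URI below are
# hypothetical), the json data consumed by this script looks like:
#
#   {
#     "version": "1.0",
#     "sources": {
#       "meta-example": {
#         "path": "meta-example",
#         "git-remote": {
#           "rev": "0123456789abcdef0123456789abcdef01234567",
#           "describe": "",
#           "branch": "master",
#           "remotes": {
#             "origin": { "uri": "https://git.example.com/meta-example" }
#           }
#         }
#       }
#     }
#   }
#
# A source entry may additionally carry a "contains_this_file" key marking
# the layer this script itself lives in; such a source is skipped unless
# --force-bootstraplayer-checkout is given.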
16
17import argparse
18import json
19import os
20import subprocess
21
22def _is_repo_git_repo(repodir):
23 try:
24 curr_toplevel = subprocess.check_output("git -C %s rev-parse --show-toplevel" % repodir, shell=True, stderr=subprocess.DEVNULL)
25 if curr_toplevel.strip().decode("utf-8") == repodir:
26 return True
27 except subprocess.CalledProcessError:
28 pass
29 return False
30
31def _is_repo_at_rev(repodir, rev):
32 try:
33 curr_rev = subprocess.check_output("git -C %s rev-parse HEAD" % repodir, shell=True, stderr=subprocess.DEVNULL)
34 if curr_rev.strip().decode("utf-8") == rev:
35 return True
36 except subprocess.CalledProcessError:
37 pass
38 return False
39
40def _is_repo_at_remote_uri(repodir, remote, uri):
41 try:
42 curr_uri = subprocess.check_output("git -C %s remote get-url %s" % (repodir, remote), shell=True, stderr=subprocess.DEVNULL)
43 if curr_uri.strip().decode("utf-8") == uri:
44 return True
45 except subprocess.CalledProcessError:
46 pass
47 return False
48
49def _contains_submodules(repodir):
50 return os.path.exists(os.path.join(repodir,".gitmodules"))
51
52def _write_layer_list(dest, repodirs):
53 layers = []
54 for r in repodirs:
55 for root, dirs, files in os.walk(r):
56 if os.path.basename(root) == 'conf' and 'layer.conf' in files:
57 layers.append(os.path.relpath(os.path.dirname(root), dest))
58 layers_f = os.path.join(dest, ".oe-layers.json")
59 print("Writing list of layers into {}".format(layers_f))
60 with open(layers_f, 'w') as f:
61 json.dump({"version":"1.0","layers":layers}, f, sort_keys=True, indent=4)
62
63def _do_checkout(args, json):
64 repos = json['sources']
65 repodirs = []
66 oesetupbuild = None
67 for r_name in repos:
68 r_data = repos[r_name]
69 repodir = os.path.abspath(os.path.join(args['destdir'], r_data['path']))
70 repodirs.append(repodir)
71
72 if 'contains_this_file' in r_data.keys():
73 force_arg = 'force_bootstraplayer_checkout'
74 if not args[force_arg]:
75 print('Note: not checking out source {repo}, use {repoflag} to override.'.format(repo=r_name, repoflag='--force-bootstraplayer-checkout'))
76 continue
77 r_remote = r_data['git-remote']
78 rev = r_remote['rev']
79 desc = r_remote['describe']
80 if not desc:
81 desc = rev[:10]
82 branch = r_remote['branch']
83 remotes = r_remote['remotes']
84
85 print('\nSetting up source {}, revision {}, branch {}'.format(r_name, desc, branch))
86 if not _is_repo_git_repo(repodir):
87 cmd = 'git init -q {}'.format(repodir)
88 print("Running '{}'".format(cmd))
89 subprocess.check_output(cmd, shell=True)
90
91 for remote in remotes:
92 if not _is_repo_at_remote_uri(repodir, remote, remotes[remote]['uri']):
93 cmd = "git remote remove {} > /dev/null 2>&1; git remote add {} {}".format(remote, remote, remotes[remote]['uri'])
94 print("Running '{}' in {}".format(cmd, repodir))
95 subprocess.check_output(cmd, shell=True, cwd=repodir)
96
97 cmd = "git fetch -q {} || true".format(remote)
98 print("Running '{}' in {}".format(cmd, repodir))
99 subprocess.check_output(cmd, shell=True, cwd=repodir)
100
101 if not _is_repo_at_rev(repodir, rev):
102 cmd = "git fetch -q --all || true"
103 print("Running '{}' in {}".format(cmd, repodir))
104 subprocess.check_output(cmd, shell=True, cwd=repodir)
105
106 cmd = 'git checkout -q {}'.format(rev)
107 print("Running '{}' in {}".format(cmd, repodir))
108 subprocess.check_output(cmd, shell=True, cwd=repodir)
109
110 if _contains_submodules(repodir):
111 print("Repo {} contains submodules, use 'git submodule update' to ensure they are up to date".format(repodir))
112 if os.path.exists(os.path.join(repodir, 'scripts/oe-setup-build')):
113 oesetupbuild = os.path.join(repodir, 'scripts/oe-setup-build')
114
115 _write_layer_list(args['destdir'], repodirs)
116
117 if oesetupbuild:
118 oesetupbuild_symlink = os.path.join(args['destdir'], 'setup-build')
119 if os.path.exists(oesetupbuild_symlink):
120 os.remove(oesetupbuild_symlink)
121 os.symlink(os.path.relpath(oesetupbuild,args['destdir']),oesetupbuild_symlink)
122 print("\nRun '{}' to list available build configuration templates and set up a build from one of them.".format(oesetupbuild_symlink))
123
124parser = argparse.ArgumentParser(description="A self-contained python script that fetches all the needed layers and sets them to the correct revisions using data in json format from a separate file. The json data can be created from an active build directory with 'bitbake-layers create-layers-setup destdir'; a sample file and a schema are in meta/files/.")
125
126parser.add_argument('--force-bootstraplayer-checkout', action='store_true',
127 help='Force the checkout of the layer containing this file (by default it is presumed that as this script is in it, the layer is already in place).')
128
129try:
130 defaultdest = os.path.dirname(subprocess.check_output('git rev-parse --show-toplevel', universal_newlines=True, shell=True, cwd=os.path.dirname(__file__)))
131except subprocess.CalledProcessError as e:
132 defaultdest = os.path.abspath(".")
133
134parser.add_argument('--destdir', default=defaultdest, help='Where to check out the layers (default is {defaultdest}).'.format(defaultdest=defaultdest))
135parser.add_argument('--jsondata', default=__file__+".json", help='File containing the layer data in json format (default is {defaultjson}).'.format(defaultjson=__file__+".json"))
136
137args = parser.parse_args()
138
139with open(args.jsondata) as f:
140 json_f = json.load(f)
141
142supported_versions = ["1.0"]
143if json_f["version"] not in supported_versions:
144 raise Exception("File {} has version {}, which is not in supported versions: {}".format(args.jsondata, json_f["version"], supported_versions))
145
146_do_checkout(vars(args), json_f)
diff --git a/scripts/oe-setup-vscode b/scripts/oe-setup-vscode
new file mode 100755
index 0000000000..b8642780d5
--- /dev/null
+++ b/scripts/oe-setup-vscode
@@ -0,0 +1,93 @@
1#!/bin/sh
2
3usage() {
4 echo "$0 <OEINIT> <BUILDDIR>"
5 echo " OEINIT: path to directory where the .vscode folder is"
6    echo "  BUILDDIR: directory passed to the oe-init-build-env script"
7}
8
9if [ "$#" -ne 2 ]; then
10 usage
11 exit 1
12fi
13
14OEINIT=$(readlink -f "$1")
15BUILDDIR=$(readlink -f "$2")
16VSCODEDIR=$OEINIT/.vscode
17
18if [ ! -d "$OEINIT" ] || [ ! -d "$BUILDDIR" ]; then
19 echo "$OEINIT and/or $BUILDDIR directories are not present."
20 exit 1
21fi
22
23VSCODE_SETTINGS=$VSCODEDIR/settings.json
24ws_builddir="$(echo "$BUILDDIR" | sed -e "s|$OEINIT|\${workspaceFolder}|g")"
25
26# If BUILDDIR is in scope of VSCode, ensure VSCode does not try to index the build folder.
27# Indexing it would keep the CPU busy and eventually lead to an OOM exception.
28mkdir -p "$VSCODEDIR"
29cat <<EOMsettings > "$VSCODE_SETTINGS"
30{
31 "bitbake.pathToBitbakeFolder": "\${workspaceFolder}/bitbake",
32 "bitbake.pathToEnvScript": "\${workspaceFolder}/oe-init-build-env",
33 "bitbake.pathToBuildFolder": "$ws_builddir",
34 "bitbake.commandWrapper": "",
35 "bitbake.workingDirectory": "\${workspaceFolder}",
36 "files.exclude": {
37 "**/.git/**": true,
38 "**/_build/**": true,
39 "**/buildhistory/**": true,
40 "**/cache/**": true,
41 "**/downloads/**": true,
42 "**/node_modules/**": true,
43 "**/oe-logs/**": true,
44 "**/oe-workdir/**": true,
45 "**/sstate-cache/**": true,
46 "**/tmp*/**": true,
47 "**/workspace/attic/**": true,
48 "**/workspace/sources/**": true
49 },
50 "files.watcherExclude": {
51 "**/.git/**": true,
52 "**/_build/**": true,
53 "**/buildhistory/**": true,
54 "**/cache/**": true,
55 "**/downloads/**": true,
56 "**/node_modules/**": true,
57 "**/oe-logs/**": true,
58 "**/oe-workdir/**": true,
59 "**/sstate-cache/**": true,
60 "**/tmp*/**": true,
61 "**/workspace/attic/**": true,
62 "**/workspace/sources/**": true
63 },
64 "python.analysis.exclude": [
65 "**/_build/**",
66 "**/.git/**",
67 "**/buildhistory/**",
68 "**/cache/**",
69 "**/downloads/**",
70 "**/node_modules/**",
71 "**/oe-logs/**",
72 "**/oe-workdir/**",
73 "**/sstate-cache/**",
74 "**/tmp*/**",
75 "**/workspace/attic/**",
76 "**/workspace/sources/**"
77 ]
78}
79EOMsettings
80
81
82# Recommend the yocto-bitbake extension so VSCode prompts the user to install it
83VSCODE_EXTENSIONS=$VSCODEDIR/extensions.json
84cat <<EOMextensions > "$VSCODE_EXTENSIONS"
85{
86 "recommendations": [
87 "yocto-project.yocto-bitbake"
88 ]
89}
90EOMextensions
91
92echo "You had no $VSCODEDIR configuration."
93echo "These configuration files have therefore been created for you."
diff --git a/scripts/oe-time-dd-test.sh b/scripts/oe-time-dd-test.sh
index 386de83dce..81748b8c9e 100755
--- a/scripts/oe-time-dd-test.sh
+++ b/scripts/oe-time-dd-test.sh
@@ -1,5 +1,9 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
3# oe-time-dd-test records how much time it takes to 7# oe-time-dd-test records how much time it takes to
4# write <count> number of kilobytes to the filesystem. 8# write <count> number of kilobytes to the filesystem.
5# It also records the number of processes that are in 9# It also records the number of processes that are in
diff --git a/scripts/oe-trim-schemas b/scripts/oe-trim-schemas
index bf77c8cf64..e3b26e273e 100755
--- a/scripts/oe-trim-schemas
+++ b/scripts/oe-trim-schemas
@@ -1,5 +1,7 @@
1#! /usr/bin/env python3 1#! /usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/oepydevshell-internal.py b/scripts/oepydevshell-internal.py
index e3c35bbe2c..3bf7df1114 100755
--- a/scripts/oepydevshell-internal.py
+++ b/scripts/oepydevshell-internal.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/opkg-query-helper.py b/scripts/opkg-query-helper.py
index bc3ab43823..084d9ef684 100755
--- a/scripts/opkg-query-helper.py
+++ b/scripts/opkg-query-helper.py
@@ -29,7 +29,7 @@ for arg in sys.argv[1:]:
29 args.append(arg) 29 args.append(arg)
30 30
31# Regex for removing version specs after dependency items 31# Regex for removing version specs after dependency items
32verregex = re.compile(' \([=<>]* [^ )]*\)') 32verregex = re.compile(r' \([=<>]* [^ )]*\)')
33 33
34pkg = "" 34pkg = ""
35ver = "" 35ver = ""
diff --git a/scripts/patchtest b/scripts/patchtest
new file mode 100755
index 0000000000..9218db232a
--- /dev/null
+++ b/scripts/patchtest
@@ -0,0 +1,244 @@
1#!/usr/bin/env python3
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# patchtest: execute all unittest test cases discovered for a single patch
6#
7# Copyright (C) 2016 Intel Corporation
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import json
13import logging
14import os
15import sys
16import traceback
17import unittest
18
19# Include current path so test cases can see it
20sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
21
22# Include patchtest library
23sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest'))
24
25from patchtest_parser import PatchtestParser
26from repo import PatchTestRepo
27
28logger = logging.getLogger("patchtest")
29loggerhandler = logging.StreamHandler()
30loggerhandler.setFormatter(logging.Formatter("%(message)s"))
31logger.addHandler(loggerhandler)
32logger.setLevel(logging.INFO)
33info = logger.info
34error = logger.error
35
36def getResult(patch, mergepatch, logfile=None):
37
38 class PatchTestResult(unittest.TextTestResult):
39 """ Patchtest TextTestResult """
40 shouldStop = True
41 longMessage = False
42
43 success = 'PASS'
44 fail = 'FAIL'
45 skip = 'SKIP'
46
47 def startTestRun(self):
48 # let's create the repo already, it can be used later on
49 repoargs = {
50 "repodir": PatchtestParser.repodir,
51 "commit": PatchtestParser.basecommit,
52 "branch": PatchtestParser.basebranch,
53 "patch": patch,
54 }
55
56 self.repo_error = False
57 self.test_error = False
58 self.test_failure = False
59
60 try:
61 self.repo = PatchtestParser.repo = PatchTestRepo(**repoargs)
62 except:
63                logger.error(traceback.format_exc())
64 self.repo_error = True
65 self.stop()
66 return
67
68 if mergepatch:
69 self.repo.merge()
70
71 def addError(self, test, err):
72 self.test_error = True
73 (ty, va, trace) = err
74            logger.error(traceback.format_exc())
75
76 def addFailure(self, test, err):
77            test_description = (test.id().split('.')[-1].replace('_', ' ')
78                .replace("cve", "CVE").replace("signed off by", "Signed-off-by")
79                .replace("upstream status", "Upstream-Status")
80                .replace("non auh", "non-AUH").replace("presence format", "presence"))
81 self.test_failure = True
82 fail_str = '{}: {}: {} ({})'.format(self.fail,
83 test_description, json.loads(str(err[1]))["issue"],
84 test.id())
85 print(fail_str)
86 if logfile:
87 with open(logfile, "a") as f:
88 f.write(fail_str + "\n")
89
90 def addSuccess(self, test):
91            test_description = (test.id().split('.')[-1].replace('_', ' ')
92                .replace("cve", "CVE").replace("signed off by", "Signed-off-by")
93                .replace("upstream status", "Upstream-Status")
94                .replace("non auh", "non-AUH").replace("presence format", "presence"))
95 success_str = '{}: {} ({})'.format(self.success,
96 test_description, test.id())
97 print(success_str)
98 if logfile:
99 with open(logfile, "a") as f:
100 f.write(success_str + "\n")
101
102 def addSkip(self, test, reason):
103            test_description = (test.id().split('.')[-1].replace('_', ' ')
104                .replace("cve", "CVE").replace("signed off by", "Signed-off-by")
105                .replace("upstream status", "Upstream-Status")
106                .replace("non auh", "non-AUH").replace("presence format", "presence"))
107 skip_str = '{}: {}: {} ({})'.format(self.skip,
108 test_description, json.loads(str(reason))["issue"],
109 test.id())
110 print(skip_str)
111 if logfile:
112 with open(logfile, "a") as f:
113 f.write(skip_str + "\n")
114
115 def stopTestRun(self):
116
117 # in case there was an error on repo object creation, just return
118 if self.repo_error:
119 return
120
121 self.repo.clean()
122
123 return PatchTestResult
124
125def _runner(resultklass, prefix=None):
126 # load test with the corresponding prefix
127 loader = unittest.TestLoader()
128 if prefix:
129 loader.testMethodPrefix = prefix
130
131 # create the suite with discovered tests and the corresponding runner
132 suite = loader.discover(
133 start_dir=PatchtestParser.testdir,
134 pattern=PatchtestParser.pattern,
135 top_level_dir=PatchtestParser.topdir,
136 )
137 ntc = suite.countTestCases()
138
139 # if there are no test cases, just quit
140 if not ntc:
141 return 2
142 runner = unittest.TextTestRunner(resultclass=resultklass, verbosity=0)
143
144 try:
145 result = runner.run(suite)
146 except:
147        logger.error(traceback.format_exc())
148 logger.error('patchtest: something went wrong')
149 return 1
150 if result.test_failure or result.test_error:
151 return 1
152
153 return 0
154
155def run(patch, logfile=None):
156 """ Load, setup and run pre and post-merge tests """
157 # Get the result class and install the control-c handler
158 unittest.installHandler()
159
160 # run pre-merge tests, meaning those methods with 'pretest' as prefix
161 premerge_resultklass = getResult(patch, False, logfile)
162 premerge_result = _runner(premerge_resultklass, 'pretest')
163
164 # run post-merge tests, meaning those methods with 'test' as prefix
165 postmerge_resultklass = getResult(patch, True, logfile)
166 postmerge_result = _runner(postmerge_resultklass, 'test')
167
168 print_result_message(premerge_result, postmerge_result)
169 return premerge_result or postmerge_result
170
171def print_result_message(preresult, postresult):
172 print("----------------------------------------------------------------------\n")
173 if preresult == 2 and postresult == 2:
174 logger.error(
175 "patchtest: No test cases found - did you specify the correct suite directory?"
176 )
177    elif preresult == 1 or postresult == 1:
178 logger.error(
179 "WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance"
180 )
181 else:
182 logger.info("OK: patchtest: All patchtests passed")
183 print("----------------------------------------------------------------------\n")
184
185def main():
186 tmp_patch = False
187 patch_path = PatchtestParser.patch_path
188 log_results = PatchtestParser.log_results
189 log_path = None
190 patch_list = None
191
192 git_status = os.popen("(cd %s && git status)" % PatchtestParser.repodir).read()
193 status_matches = ["Changes not staged for commit", "Changes to be committed"]
194 if any([match in git_status for match in status_matches]):
195 logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest")
196 return 1
197
198 if os.path.isdir(patch_path):
199 patch_list = [os.path.join(patch_path, filename) for filename in sorted(os.listdir(patch_path))]
200 else:
201 patch_list = [patch_path]
202
203 for patch in patch_list:
204 if os.path.getsize(patch) == 0:
205 logger.error('patchtest: patch is empty')
206 return 1
207
208 logger.info('Testing patch %s' % patch)
209
210 if log_results:
211 log_path = patch + ".testresult"
212 with open(log_path, "a") as f:
213 f.write("Patchtest results for patch '%s':\n\n" % patch)
214
215 try:
216 if log_path:
217 run(patch, log_path)
218 else:
219 run(patch)
220 finally:
221 if tmp_patch:
222 os.remove(patch)
223
224if __name__ == '__main__':
225 ret = 1
226
227 # Parse the command line arguments and store it on the PatchtestParser namespace
228 PatchtestParser.set_namespace()
229
230 # set debugging level
231 if PatchtestParser.debug:
232 logger.setLevel(logging.DEBUG)
233
234    # if topdir is not defined, default it to testdir
235 if not PatchtestParser.topdir:
236 PatchtestParser.topdir = PatchtestParser.testdir
237
238 try:
239 ret = main()
240 except Exception:
241        traceback.print_exc(5)
243
244 sys.exit(ret)
diff --git a/scripts/patchtest-get-branch b/scripts/patchtest-get-branch
new file mode 100755
index 0000000000..c6e242f8b6
--- /dev/null
+++ b/scripts/patchtest-get-branch
@@ -0,0 +1,81 @@
1#!/usr/bin/env python3
2
3# Get target branch from the corresponding mbox
4#
5# NOTE: this script was based on patches coming to the openembedded-core
6# where target branch is defined inside brackets as subject prefix
7# i.e. [master], [rocko], etc.
8#
9# Copyright (C) 2016 Intel Corporation
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13
14import mailbox
15import argparse
16import re
17import git
18
19re_prefix = re.compile(r"(\[.*\])", re.DOTALL)
20
21def get_branch(filepath_repo, filepath_mbox, default_branch):
22 branch = None
23
24 # get all remotes branches
25 gitbranches = git.Git(filepath_repo).branch('-a').splitlines()
26
27 # from gitbranches, just get the names
28 branches = [b.split('/')[-1] for b in gitbranches]
29
30 subject = ' '.join(mailbox.mbox(filepath_mbox)[0]['subject'].splitlines())
31
32 # we expect that patches will have somewhere between one and three
33 # consecutive sets of square brackets with tokens inside, e.g.:
34 # 1. [PATCH]
35 # 2. [OE-core][PATCH]
36 # 3. [OE-core][kirkstone][PATCH]
37 # Some of them may also be part of a series, in which case the PATCH
38 # token will be formatted like:
39 # [PATCH 1/4]
40 # or they will be revisions to previous patches, where it will be:
41 # [PATCH v2]
42 # Or they may contain both:
43 # [PATCH v2 3/4]
44 # In any case, we want mprefix to contain all of these tokens so
45 # that we can search for branch names within them.
46 mprefix = re.findall(r'\[.*?\]', subject)
47 found_branch = None
48 if mprefix:
49 # Iterate over the tokens and compare against the branch list to
50 # figure out which one the patch is targeting
51 for token in mprefix:
52 stripped = token.lower().strip('[]')
53 if default_branch in stripped:
54 found_branch = default_branch
55 break
56 else:
57 for branch in branches:
58 # ignore branches named "core"
59 if branch != "core" and stripped.rfind(branch) != -1:
60 found_branch = token.split(' ')[0].strip('[]')
61 break
62
63 # if there's no mprefix content or no known branches were found in
64 # the tokens, assume the target is master
65 if found_branch is None:
66 found_branch = "master"
67
68 return (subject, found_branch)
69
70if __name__ == '__main__':
71
72 parser = argparse.ArgumentParser()
73 parser.add_argument('repo', metavar='REPO', help='Main repository')
74 parser.add_argument('mbox', metavar='MBOX', help='mbox filename')
75    parser.add_argument('--default-branch', metavar='DEFAULT_BRANCH', default='master', help='Use this branch if none is found')
76 parser.add_argument('--separator', '-s', metavar='SEPARATOR', default=' ', help='Char separator for output data')
77 args = parser.parse_args()
78
79 subject, branch = get_branch(args.repo, args.mbox, args.default_branch)
80 print("branch: %s" % branch)
81
diff --git a/scripts/patchtest-get-series b/scripts/patchtest-get-series
new file mode 100755
index 0000000000..908442089f
--- /dev/null
+++ b/scripts/patchtest-get-series
@@ -0,0 +1,115 @@
1#!/bin/bash -e
2#
3# get-latest-series: Download latest patch series from Patchwork
4#
5# Copyright (C) 2023 BayLibre Inc.
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10# the interval into the past which we want to check for new series, in minutes
11INTERVAL_MINUTES=30
12
13# Maximum number of series to retrieve. The Patchwork API can support up to 250
14# at once.
15SERIES_LIMIT=250
16
17# Location to save patches
18DOWNLOAD_PATH="."
19
20# Name of the file to use/check as a log of previously-tested series IDs
21SERIES_TEST_LOG=".series_test.log"
22
23# Patchwork project to pull series patches from
24PROJECT="oe-core"
25
26# The Patchwork server to pull from
27SERVER="https://patchwork.yoctoproject.org/api/1.2/"
28
29help()
30{
31 echo "Usage: get-latest-series [ -i | --interval MINUTES ]
32 [ -d | --directory DIRECTORY ]
33 [ -l | --limit COUNT ]
34 [ -h | --help ]
35 [ -t | --tested-series LOGFILE]
36 [ -p | --project PROJECT ]
37 [ -s | --server SERVER ]"
38 exit 2
39}
40
41while [ "$1" != "" ]; do
42 case $1 in
43 -i|--interval)
44 INTERVAL_MINUTES=$2
45 shift 2
46 ;;
47 -l|--limit)
48 SERIES_LIMIT=$2
49 shift 2
50 ;;
51 -d|--directory)
52 DOWNLOAD_PATH=$2
53 shift 2
54 ;;
55 -p|--project)
56 PROJECT=$2
57 shift 2
58 ;;
59 -s|--server)
60 SERVER=$2
61 shift 2
62 ;;
63 -t|--tested-series)
64 SERIES_TEST_LOG=$2
65 shift 2
66 ;;
67 -h|--help)
68 help
69 ;;
70 *)
71 echo "Unknown option $1"
72 help
73 ;;
74 esac
75done
76
77# The time this script is running at
78START_TIME=$(date --date "now" +"%Y-%m-%dT%H:%M:%S")
79
80# the corresponding timestamp we want to check against for new patch series
81SERIES_CHECK_LIMIT=$(date --date "now - ${INTERVAL_MINUTES} minutes" +"%Y-%m-%dT%H:%M:%S")
82
83echo "Start time is $START_TIME"
84echo "Series check limit is $SERIES_CHECK_LIMIT"
85
86# Create DOWNLOAD_PATH if it doesn't exist
87if [ ! -d "$DOWNLOAD_PATH" ]; then
88 mkdir "${DOWNLOAD_PATH}"
89fi
90
91# Create SERIES_TEST_LOG if it doesn't exist
92if [ ! -f "$SERIES_TEST_LOG" ]; then
93 touch "${SERIES_TEST_LOG}"
94fi
95
96# Retrieve a list of series IDs from the 'git-pw series list' output. The API
97# supports a maximum of 250 results, so make sure we allow that when required
98SERIES_LIST=$(git-pw --project "${PROJECT}" --server "${SERVER}" series list --since "${SERIES_CHECK_LIMIT}" --limit "${SERIES_LIMIT}" | awk '{print $2}' | xargs | sed -e 's/[^0-9 ]//g')
99
100if [ -z "$SERIES_LIST" ]; then
101 echo "No new series for project ${PROJECT} since ${SERIES_CHECK_LIMIT}"
102 exit 0
103fi
104
105# Check each series ID
106for SERIES in $SERIES_LIST; do
107 # Download the series only if it's not found in the SERIES_TEST_LOG
108 if ! grep -w --quiet "${SERIES}" "${SERIES_TEST_LOG}"; then
109 echo "Downloading $SERIES..."
110 git-pw series download --separate "${SERIES}" "${DOWNLOAD_PATH}"
111 echo "${SERIES}" >> "${SERIES_TEST_LOG}"
112 else
113 echo "Already tested ${SERIES}. Skipping..."
114 fi
115done
diff --git a/scripts/patchtest-send-results b/scripts/patchtest-send-results
new file mode 100755
index 0000000000..8a3dadbd11
--- /dev/null
+++ b/scripts/patchtest-send-results
@@ -0,0 +1,110 @@
1#!/usr/bin/env python3
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# patchtest: execute all unittest test cases discovered for a single patch
6# Note that this script is currently under development and has been
7# hard-coded with default values for testing purposes. This script
8# should not be used without changing the default recipient, at minimum.
9#
10# Copyright (C) 2023 BayLibre Inc.
11#
12# SPDX-License-Identifier: GPL-2.0-only
13#
14
15import argparse
16import boto3
17import configparser
18import mailbox
19import os
20import re
21import sys
22
23greeting = """Thank you for your submission. Patchtest identified one
24or more issues with the patch. Please see the log below for
25more information:\n\n---\n"""
26
27suggestions = """\n---\n\nPlease address the issues identified and
28submit a new revision of the patch, or alternatively, reply to this
29email with an explanation of why the patch should be accepted. If you
30believe these results are due to an error in patchtest, please submit a
31bug at https://bugzilla.yoctoproject.org/ (use the 'Patchtest' category
32under 'Yocto Project Subprojects'). For more information on specific
33failures, see: https://wiki.yoctoproject.org/wiki/Patchtest. Thank
34you!"""
35
36def has_a_failed_test(raw_results):
37 return any(raw_result.split(':')[0] == "FAIL" for raw_result in raw_results.splitlines())
38
39parser = argparse.ArgumentParser(description="Send patchtest results to a submitter for a given patch")
40parser.add_argument("-p", "--patch", dest="patch", required=True, help="The patch file to summarize")
41parser.add_argument("-d", "--debug", dest="debug", required=False, action='store_true', help="Print raw email headers and content, but don't actually send it")
42args = parser.parse_args()
43
44if not os.path.exists(args.patch):
45 print(f"Patch '{args.patch}' not found - did you provide the right path?")
46 sys.exit(1)
47elif not os.path.exists(args.patch + ".testresult"):
48 print(f"Found patch '{args.patch}' but '{args.patch}.testresult' was not present. Have you run patchtest on the patch?")
49 sys.exit(1)
50
51result_file = args.patch + ".testresult"
52testresult = None
53
54with open(result_file, "r") as f:
55 testresult = f.read()
56
57# we know these patch files will only contain a single patch, so only
58# worry about the first element for getting the subject
59mbox = mailbox.mbox(args.patch)
60mbox_subject = mbox[0]['subject']
61subject_line = f"Patchtest results for {mbox_subject}"
62
63# extract the submitter email address and use it as the reply address
64# for the results
65reply_address = mbox[0]['from']
66
67# extract the message ID and use that as the in-reply-to address
68# TODO: This will need to change again when patchtest can handle a whole
69# series at once
70in_reply_to = mbox[0]['Message-ID']
71
72# the address the results email is sent from
73from_address = "patchtest@automation.yoctoproject.org"
74
75# mailing list to CC
76cc_address = "openembedded-core@lists.openembedded.org"
77
78if has_a_failed_test(testresult):
79 reply_contents = None
80    if max((len(line) for line in testresult.splitlines()), default=0) > 220:
81 warning = "Tests failed for the patch, but the results log could not be processed due to excessive result line length."
82 reply_contents = greeting + warning + suggestions
83 else:
84 reply_contents = greeting + testresult + suggestions
85
86 ses_client = boto3.client('ses', region_name='us-west-2')
87
88 # Construct the headers for the email. We only want to reply
89 # directly to the tested patch, so make In-Reply-To and References
90 # the same value.
91 raw_data = 'From: ' + from_address + '\nTo: ' + reply_address + \
92        '\nCC: ' + cc_address + '\nSubject: ' + subject_line + \
93        '\nIn-Reply-To: ' + in_reply_to + \
94        '\nReferences: ' + in_reply_to + \
95        '\nMIME-Version: 1.0' + \
96        '\nContent-type: Multipart/Mixed;boundary="NextPart"\n\n--NextPart\nContent-Type: text/plain\n\n' + \
97 reply_contents + '\n\n--NextPart'
98
99 if args.debug:
100 print(f"RawMessage: \n\n{raw_data}")
101 else:
102 response = ses_client.send_raw_email(
103 Source="patchtest@automation.yoctoproject.org",
104 RawMessage={
105 "Data": raw_data,
106 },
107 )
108
109else:
110 print(f"No failures identified for {args.patch}.")
diff --git a/scripts/patchtest-setup-sharedir b/scripts/patchtest-setup-sharedir
new file mode 100755
index 0000000000..277677e527
--- /dev/null
+++ b/scripts/patchtest-setup-sharedir
@@ -0,0 +1,83 @@
1#!/bin/bash -e
2#
3# patchtest-setup-sharedir: Setup a directory for storing mboxes and
4# repositories to be shared with the guest machine, including updates to
5# the repos if the directory already exists
6#
7# Copyright (C) 2023 BayLibre Inc.
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12# poky repository
13POKY_REPO="https://git.yoctoproject.org/poky"
14
15# patchtest repository
16PATCHTEST_REPO="https://git.yoctoproject.org/patchtest"
17
18# the name of the directory
19SHAREDIR="patchtest_share"
20
21help()
22{
23 echo "Usage: patchtest-setup-sharedir [ -d | --directory SHAREDIR ]
24 [ -p | --patchtest PATCHTEST_REPO ]
25 [ -y | --poky POKY_REPO ]"
26 exit 2
27}
28
29while [ "$1" != "" ]; do
30 case $1 in
31 -d|--directory)
32 SHAREDIR=$2
33 shift 2
34 ;;
35 -p|--patchtest)
36 PATCHTEST_REPO=$2
37 shift 2
38 ;;
39 -y|--poky)
40 POKY_REPO=$2
41 shift 2
42 ;;
43 -h|--help)
44 help
45 ;;
46 *)
47 echo "Unknown option $1"
48 help
49 ;;
50 esac
51done
52
53# define MBOX_DIR where the patch series will be stored by
54# get-latest-series
55MBOX_DIR="${SHAREDIR}/mboxes"
56
57# Create SHAREDIR if it doesn't exist
58if [ ! -d "$SHAREDIR" ]; then
59 mkdir -p "${SHAREDIR}"
60 echo "Created ${SHAREDIR}"
61fi
62
63# Create the mboxes directory if it doesn't exist
64if [ ! -d "$MBOX_DIR" ]; then
65 mkdir -p "${MBOX_DIR}"
66 echo "Created ${MBOX_DIR}"
67fi
68
69# clone poky if it's not already present; otherwise, update it
70BASENAME=$(basename "${POKY_REPO}")
71if [ ! -d "${SHAREDIR}/${BASENAME}" ]; then
72    git clone "${POKY_REPO}" "${SHAREDIR}/${BASENAME}"
73else
74    (cd "${SHAREDIR}/${BASENAME}" && git pull)
75fi
76
77# clone patchtest if it's not already present; otherwise, update it
78BASENAME=$(basename "${PATCHTEST_REPO}")
79if [ ! -d "${SHAREDIR}/${BASENAME}" ]; then
80    git clone "${PATCHTEST_REPO}" "${SHAREDIR}/${BASENAME}"
81else
82    (cd "${SHAREDIR}/${BASENAME}" && git pull)
83fi
diff --git a/scripts/patchtest.README b/scripts/patchtest.README
new file mode 100644
index 0000000000..3c1ee1af1d
--- /dev/null
+++ b/scripts/patchtest.README
@@ -0,0 +1,159 @@
1# Patchtest
2
3## Introduction
4
5Patchtest is a test framework for community patches based on the standard
6unittest python module. As input, it needs three elements to work properly:
7
8- a patch in mbox format (either created with `git format-patch` or fetched
9from Patchwork)
10- a test suite
11- a target repository
12
13The first test suite intended to be used with patchtest is found in the
14openembedded-core repository [1], targeting patches sent to the
15openembedded-core mailing list [2]. This suite is also intended as a
16baseline for development of similar suites for other layers as needed.
17
18Patchtest can run on either a host or a guest machine, depending on
19which environment you prefer. If you plan to test your own patches (a
20good practice before they are sent to the mailing list), the easiest
21way is to install and run it on your local host; on the other hand, if
22automatic testing is intended, the guest method is strongly recommended.
23The guest method requires the use of the patchtest layer, in addition to
24the tools available in oe-core: https://git.yoctoproject.org/patchtest/
25
26## Installation
27
28Patchtest is a tool for use with the Yocto Project, so the [quick start
29guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html)
30lists the necessary prerequisites. In addition, patchtest relies on
31several Python modules for parsing and analysis, which can be installed
32by running `pip install -r meta/lib/patchtest/requirements.txt`. Note
33that git-pw is not automatically added to the user's PATH; by default,
34it is installed at ~/.local/bin/git-pw.
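
For example, a minimal sketch (assuming the default pip user-install
location) that installs the requirements and makes git-pw visible in the
current shell:

    pip install -r meta/lib/patchtest/requirements.txt
    export PATH="$HOME/.local/bin:$PATH"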
35
36For git-pw (and therefore scripts such as patchtest-get-series) to work, you need
37to provide a Patchwork instance in your user's .gitconfig, like so (the project
38can be specified using the --project argument):
39
40 git config --global pw.server "https://patchwork.yoctoproject.org/api/1.2/"
41
42To work with patchtest, you should have the following repositories cloned:
43
441. https://git.openembedded.org/openembedded-core/ (or https://git.yoctoproject.org/poky/)
452. https://git.openembedded.org/bitbake/ (if not using poky)
463. https://git.yoctoproject.org/patchtest (if using guest mode)
47
48## Usage
49
50### Obtaining Patches
51
52Patch files can be obtained directly from cloned repositories using `git
53format-patch -N` (where N is the number of patches starting from HEAD to
54generate). git-pw can also be used with filters for users, patch/series IDs,
55and timeboxes if specific patches are desired. For more information, see the
56git-pw [documentation](https://patchwork.readthedocs.io/projects/git-pw/en/latest/).
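
As a sketch, either of the following produces mbox files that patchtest can
consume (the series ID shown is hypothetical):

    git format-patch -3                              # last three commits from HEAD
    git-pw series download --separate 12345 mboxes/  # fetch series 12345 from Patchwork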
57
58Alternatively, `scripts/patchtest-get-series` can be used to pull mbox files from
59the Patchwork instance configured previously in .gitconfig. It uses a log file
60called ".series_test.log" to store and compare series IDs so that the same
61versions of a patch are not tested multiple times unintentionally. By default,
63it will pull up to 250 patch series from the last 30 minutes using oe-core as
63the target project, but these parameters can be configured using the `--limit`,
64`--interval`, and `--project` arguments respectively. For more information, run
65`patchtest-get-series -h`.
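
For instance, to pull at most ten series submitted over the last hour for
the oe-core project into a local mboxes directory:

    scripts/patchtest-get-series --limit 10 --interval 60 --project oe-core --directory mboxes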
66
67### Host Mode
68
69To run patchtest on the host, do the following:
70
711. In openembedded-core/poky, do `source oe-init-build-env`
722. Generate patch files from the target repository by doing `git format-patch -N`,
73 where N is the number of patches starting at HEAD, or by using git-pw
74 or patchtest-get-series
753. Run patchtest on a patch file by doing the following:
76
77 patchtest --patch /path/to/patch/file
78
79 or, if you have stored the patch files in a directory, do:
80
81 patchtest --directory /path/to/patch/directory
82
83 For example, to test `master-gcc-Fix--fstack-protector-issue-on-aarch64.patch` against the oe-core test suite:
84
85 patchtest --patch master-gcc-Fix--fstack-protector-issue-on-aarch64.patch
86
87 If you want to use a different test suite or target repository, you can use the --testdir and --repodir flags:
88
89 patchtest --patch /path/to/patch/file --repodir /path/to/repo --testdir /path/to/test/dir
90
91### Guest Mode
92
93Patchtest's guest mode has been refactored to more closely mirror the
94typical Yocto Project image build workflow, but there are still some key
95differences to keep in mind. The primary objective is to provide a level
96of isolation from the host when testing patches pulled automatically
97from the mailing lists. When executed this way, the test process is
98essentially running arbitrary code from the internet, which could be
99catastrophic if malicious content or even poorly-handled edge cases are
100not protected against. In order to use this mode, the
101https://git.yoctoproject.org/patchtest/ repository must be cloned and
102the meta-patchtest layer added to bblayers.conf.
103
104The general flow of guest mode is:
105
1061. Run patchtest-setup-sharedir --directory <dirname> to create a
107 directory for mounting
1082. Collect patches via patchtest-get-series (or other manual step) into the
109 <dirname>/mboxes path
1103. Ensure that a user with ID 1200 has appropriate read/write
111 permissions to <dirname> and <dirname>/mboxes, so that the
112 "patchtest" user in the core-image-patchtest image can function
1134. Build the core-image-patchtest image
1145. Run the core-image-patchtest image with the mounted sharedir, like
115 so:
116 `runqemu kvm nographic qemuparams="-snapshot -fsdev
117 local,id=test_mount,path=/workspace/yocto/poky/build/patchtestdir,security_model=mapped
118 -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m
119 2048"`
120
121Patchtest is run by an initscript for the core-image-patchtest image and
122shuts down after completion, so there is no input required from a user
123during operation. Unlike in host mode, the guest is designed to
124automatically generate test result files, in the same directory as the
125targeted patch files but with .testresult as an extension. These contain
126the entire output of the patchtest run for each respective pass,
127including the PASS, FAIL, and SKIP indicators for each test run.
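
An illustrative (made-up) excerpt of such a .testresult file, following the
format the patchtest script writes:

    Patchtest results for patch '0001-example-fix.patch':

    PASS: pretest Signed-off-by presence (test_mbox.TestMbox.pretest_signed_off_by_presence)
    FAIL: test max line length: Patch line is too long (test_metadata.TestMetadata.test_max_line_length)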
128
129### Running Patchtest Selftests
130
131Patchtest also includes selftests, which are currently in the form of
132several contrived patch files and a runner script found in
133`meta/lib/patchtest/selftest/`. In order to run these, the
134`meta-selftest` layer must be added to bblayers.conf. It is also
135recommended to set BB_SERVER_TIMEOUT (and thus enable memory-resident
136bitbake) in local.conf to reduce runtime, as the bitbake startup process
137will otherwise add to it significantly when restarted for each test
138patch.
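
A minimal sketch of a selftest run from an initialized build directory (the
runner path assumes the oe-core layout described above):

    bitbake-layers add-layer ../meta-selftest
    echo 'BB_SERVER_TIMEOUT = "60"' >> conf/local.conf
    ../meta/lib/patchtest/selftest/selftest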
139
140## Contributing
141
142The openembedded-core mailing list (openembedded-core@lists.openembedded.org) is used for
143questions, comments, and patch review. It is subscriber-only, so please register before
144posting.
145
146When sending single patches, please use something like:
147
148 git send-email -M -1 --to=openembedded-core@lists.openembedded.org --subject-prefix=OE-core][PATCH
149
150## Maintenance
152
153Maintainers:
154 Trevor Gamblin <tgamblin@baylibre.com>
155
156## Links
158[1] https://git.openembedded.org/openembedded-core/
159[2] https://www.yoctoproject.org/community/mailing-lists/
diff --git a/scripts/postinst-intercepts/update_gtk_icon_cache b/scripts/postinst-intercepts/update_gtk_icon_cache
index 99367a2855..a92bd840c6 100644
--- a/scripts/postinst-intercepts/update_gtk_icon_cache
+++ b/scripts/postinst-intercepts/update_gtk_icon_cache
@@ -11,7 +11,11 @@ $STAGING_DIR_NATIVE/${libdir_native}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --u
11 11
12for icondir in $D/usr/share/icons/*/ ; do 12for icondir in $D/usr/share/icons/*/ ; do
13 if [ -d $icondir ] ; then 13 if [ -d $icondir ] ; then
14 gtk-update-icon-cache -fqt $icondir 14 for gtkuic_cmd in gtk-update-icon-cache gtk4-update-icon-cache ; do
15 if [ -n "$(which $gtkuic_cmd)" ]; then
16 $gtkuic_cmd -fqt $icondir
17 fi
18 done
15 fi 19 fi
16done 20done
17 21
diff --git a/scripts/postinst-intercepts/update_mandb b/scripts/postinst-intercepts/update_mandb
new file mode 100644
index 0000000000..f91bafdb11
--- /dev/null
+++ b/scripts/postinst-intercepts/update_mandb
@@ -0,0 +1,18 @@
1#!/bin/sh
2#
3# SPDX-License-Identifier: MIT
4#
5
6set -eu
7
8# Create a temporary man_db.conf with paths to the rootfs, as mandb needs absolute paths
9CONFIG=$(mktemp --tmpdir update-mandb.XXXXX)
10sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf > $CONFIG
11
12mkdir -p $D${localstatedir}/cache/man/
13
14PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${bindir}/mandb --config-file $CONFIG --create
15
16rm -f $CONFIG
17
18chown -R man:man $D${localstatedir}/cache/man/
diff --git a/scripts/postinst-intercepts/update_udev_hwdb b/scripts/postinst-intercepts/update_udev_hwdb
index 8076b8ae6f..8b3f5de791 100644
--- a/scripts/postinst-intercepts/update_udev_hwdb
+++ b/scripts/postinst-intercepts/update_udev_hwdb
@@ -9,14 +9,17 @@ case "${PREFERRED_PROVIDER_udev}" in
9 systemd) 9 systemd)
10 UDEV_EXTRA_ARGS="--usr" 10 UDEV_EXTRA_ARGS="--usr"
11 UDEVLIBDIR="${rootlibexecdir}" 11 UDEVLIBDIR="${rootlibexecdir}"
12 UDEVADM="${base_bindir}/udevadm"
12 ;; 13 ;;
13 14
14 *) 15 *)
15 UDEV_EXTRA_ARGS="" 16 UDEV_EXTRA_ARGS=""
16 UDEVLIBDIR="${sysconfdir}" 17 UDEVLIBDIR="${sysconfdir}"
18 UDEVADM="${bindir}/udevadm"
17 ;; 19 ;;
18esac 20esac
19 21
20rm -f $D${UDEVLIBDIR}/udev/hwdb.bin 22rm -f $D${UDEVLIBDIR}/udev/hwdb.bin
21PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${libexecdir}/${binprefix}udevadm hwdb --update --root $D ${UDEV_EXTRA_ARGS} 23PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${UDEVADM} hwdb --update --root $D ${UDEV_EXTRA_ARGS} ||
24 PSEUDO_UNLOAD=1 qemuwrapper -L $D $D${UDEVADM} hwdb --update --root $D ${UDEV_EXTRA_ARGS}
22chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin 25chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin
diff --git a/scripts/pull-sdpx-licenses.py b/scripts/pull-sdpx-licenses.py
new file mode 100755
index 0000000000..597a62133f
--- /dev/null
+++ b/scripts/pull-sdpx-licenses.py
@@ -0,0 +1,101 @@
1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import argparse
8import json
9import sys
10import urllib.request
11from pathlib import Path
12
13TOP_DIR = Path(__file__).parent.parent
14
15
16def main():
17 parser = argparse.ArgumentParser(
18 description="Update SPDX License files from upstream"
19 )
20 parser.add_argument(
21 "-v",
22 "--version",
23 metavar="MAJOR.MINOR[.MICRO]",
24 help="Pull specific version of License list instead of latest",
25 )
26 parser.add_argument(
27 "--overwrite",
28 action="store_true",
29 help="Update existing license file text with upstream text",
30 )
31 parser.add_argument(
32 "--deprecated",
33 action="store_true",
34 help="Update deprecated licenses",
35 )
36 parser.add_argument(
37 "--dest",
38 type=Path,
39 default=TOP_DIR / "meta" / "files" / "common-licenses",
40 help="Write licenses to directory DEST. Default is %(default)s",
41 )
42
43 args = parser.parse_args()
44
45 if args.version:
46 version = f"v{args.version}"
47 else:
48 # Fetch the latest release
49 req = urllib.request.Request(
50 "https://api.github.com/repos/spdx/license-list-data/releases/latest"
51 )
52 req.add_header("X-GitHub-Api-Version", "2022-11-28")
53 req.add_header("Accept", "application/vnd.github+json")
54 with urllib.request.urlopen(req) as response:
55 data = json.load(response)
56 version = data["tag_name"]
57
58 print(f"Pulling SPDX license list version {version}")
59 req = urllib.request.Request(
60 f"https://raw.githubusercontent.com/spdx/license-list-data/{version}/json/licenses.json"
61 )
62 with urllib.request.urlopen(req) as response:
63 spdx_licenses = json.load(response)
64
65 with (TOP_DIR / "meta" / "files" / "spdx-licenses.json").open("w") as f:
66 json.dump(spdx_licenses, f, sort_keys=True, indent=2)
67
68 total_count = len(spdx_licenses["licenses"])
69 updated = 0
70 for idx, lic in enumerate(spdx_licenses["licenses"]):
71 lic_id = lic["licenseId"]
72
73 print(f"[{idx + 1} of {total_count}] ", end="")
74
75 dest_license_file = args.dest / lic_id
76 if dest_license_file.is_file() and not args.overwrite:
77 print(f"Skipping {lic_id} since it already exists")
78 continue
79
80 print(f"Fetching {lic_id}... ", end="", flush=True)
81
82 req = urllib.request.Request(lic["detailsUrl"])
83 with urllib.request.urlopen(req) as response:
84 lic_data = json.load(response)
85
86 if lic_data["isDeprecatedLicenseId"] and not args.deprecated:
87 print("Skipping (deprecated)")
88 continue
89
90 with dest_license_file.open("w") as f:
91 f.write(lic_data["licenseText"])
92 updated += 1
93 print("done")
94
95 print(f"Updated {updated} licenses")
96
97 return 0
98
99
100if __name__ == "__main__":
101 sys.exit(main())
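
Note: the script above resolves "latest" through the GitHub releases API. A small standalone sketch listing recent license-list tags against the same endpoint (release count arbitrary):

    import json
    import urllib.request

    req = urllib.request.Request(
        "https://api.github.com/repos/spdx/license-list-data/releases"
    )
    req.add_header("Accept", "application/vnd.github+json")
    with urllib.request.urlopen(req) as response:
        for release in json.load(response)[:5]:
            print(release["tag_name"])  # e.g. v3.24.0
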
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py
index fc708b55c3..16739a0fa1 100644
--- a/scripts/pybootchartgui/pybootchartgui/draw.py
+++ b/scripts/pybootchartgui/pybootchartgui/draw.py
@@ -69,6 +69,11 @@ CPU_COLOR = (0.40, 0.55, 0.70, 1.0)
69IO_COLOR = (0.76, 0.48, 0.48, 0.5) 69IO_COLOR = (0.76, 0.48, 0.48, 0.5)
70# Disk throughput color. 70# Disk throughput color.
71DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0) 71DISK_TPUT_COLOR = (0.20, 0.71, 0.20, 1.0)
72
73BYTES_RECEIVED_COLOR = (0.0, 0.0, 1.0, 1.0)
74BYTES_TRANSMITTED_COLOR = (1.0, 0.0, 0.0, 1.0)
75BYTES_RECEIVE_DIFF_COLOR = (0.0, 0.0, 1.0, 0.3)
76BYTES_TRANSMIT_DIFF_COLOR = (1.0, 0.0, 0.0, 0.3)
72# CPU load chart color. 77# CPU load chart color.
73FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0) 78FILE_OPEN_COLOR = (0.20, 0.71, 0.71, 1.0)
74# Mem cached color 79# Mem cached color
@@ -80,6 +85,22 @@ MEM_BUFFERS_COLOR = (0.4, 0.4, 0.4, 0.3)
80# Swap color 85# Swap color
81MEM_SWAP_COLOR = DISK_TPUT_COLOR 86MEM_SWAP_COLOR = DISK_TPUT_COLOR
82 87
88# avg10 CPU pressure color
89CPU_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
90# delta total CPU pressure color
91CPU_PRESSURE_TOTAL_COLOR = CPU_COLOR
92# avg10 IO pressure color
93IO_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
94# delta total IO pressure color
95IO_PRESSURE_TOTAL_COLOR = IO_COLOR
96# avg10 memory pressure color
97MEM_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
98# delta total memory pressure color
99MEM_PRESSURE_TOTAL_COLOR = DISK_TPUT_COLOR
100
101
102
103
83# Process border color. 104# Process border color.
84PROC_BORDER_COLOR = (0.71, 0.71, 0.71, 1.0) 105PROC_BORDER_COLOR = (0.71, 0.71, 0.71, 1.0)
85# Waiting process color. 106# Waiting process color.
@@ -340,6 +361,12 @@ def extents(options, xscale, trace):
340 h += 30 + bar_h 361 h += 30 + bar_h
341 if trace.disk_stats: 362 if trace.disk_stats:
342 h += 30 + bar_h 363 h += 30 + bar_h
364 if trace.cpu_pressure:
365 h += 30 + bar_h
366 if trace.io_pressure:
367 h += 30 + bar_h
368 if trace.mem_pressure:
369 h += 30 + bar_h
343 if trace.monitor_disk: 370 if trace.monitor_disk:
344 h += 30 + bar_h 371 h += 30 + bar_h
345 if trace.mem_stats: 372 if trace.mem_stats:
@@ -415,6 +442,151 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
415 442
416 curr_y = curr_y + 30 + bar_h 443 curr_y = curr_y + 30 + bar_h
417 444
445 if trace.net_stats:
446 for iface, samples in trace.net_stats.items():
447 max_received_sample = max(samples, key=lambda s: s.received_bytes)
448 max_transmitted_sample = max(samples, key=lambda s: s.transmitted_bytes)
449 max_receive_diff_sample = max(samples, key=lambda s: s.receive_diff)
450 max_transmit_diff_sample = max(samples, key=lambda s: s.transmit_diff)
451
452 draw_text(ctx, "Iface: %s" % (iface), TEXT_COLOR, off_x, curr_y+20)
453 draw_legend_line(ctx, "Bytes received (max %d)" % (max_received_sample.received_bytes),
454 BYTES_RECEIVED_COLOR, off_x+150, curr_y+20, leg_s)
455 draw_legend_line(ctx, "Bytes transmitted (max %d)" % (max_transmitted_sample.transmitted_bytes),
456 BYTES_TRANSMITTED_COLOR, off_x+400, curr_y+20, leg_s)
457 draw_legend_box(ctx, "Bytes receive diff (max %d)" % (max_receive_diff_sample.receive_diff),
458 BYTES_RECEIVE_DIFF_COLOR, off_x+650, curr_y+20, leg_s)
459 draw_legend_box(ctx, "Bytes transmit diff (max %d)" % (max_transmit_diff_sample.transmit_diff),
460 BYTES_TRANSMIT_DIFF_COLOR, off_x+900, curr_y+20, leg_s)
461
462
463 chart_rect = (off_x, curr_y + 30, w, bar_h)
464 if clip_visible(clip, chart_rect):
465 draw_box_ticks(ctx, chart_rect, sec_w)
466 draw_annotations(ctx, proc_tree, trace.times, chart_rect)
467
468 if clip_visible (clip, chart_rect):
469 draw_chart (ctx, BYTES_RECEIVED_COLOR, False, chart_rect, \
470 [(sample.time, sample.received_bytes) for sample in samples], \
471 proc_tree, None)
472
473 draw_chart (ctx, BYTES_TRANSMITTED_COLOR, False, chart_rect, \
474 [(sample.time, sample.transmitted_bytes) for sample in samples], \
475 proc_tree, None)
476
477 if clip_visible (clip, chart_rect):
478 draw_chart (ctx, BYTES_RECEIVE_DIFF_COLOR, True, chart_rect, \
479 [(sample.time, sample.receive_diff) for sample in samples], \
480 proc_tree, None)
481
482 draw_chart (ctx, BYTES_TRANSMIT_DIFF_COLOR, True, chart_rect, \
483 [(sample.time, sample.transmit_diff) for sample in samples], \
484 proc_tree, None)
485
486 curr_y = curr_y + 30 + bar_h
487
488 # render CPU pressure chart
489 if trace.cpu_pressure:
490 max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10)
491 max_sample_total = max (trace.cpu_pressure, key = lambda s: s.deltaTotal)
492 draw_legend_line(ctx, "avg10 CPU Pressure (max %d%%)" % (max_sample_avg.avg10), CPU_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
493 draw_legend_box(ctx, "delta total CPU Pressure (max %d)" % (max_sample_total.deltaTotal), CPU_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
494
495 # render delta total cpu
496 chart_rect = (off_x, curr_y+30, w, bar_h)
497 if clip_visible (clip, chart_rect):
498 draw_box_ticks (ctx, chart_rect, sec_w)
499 draw_annotations (ctx, proc_tree, trace.times, chart_rect)
500 draw_chart (ctx, CPU_PRESSURE_TOTAL_COLOR, True, chart_rect, \
501 [(sample.time, sample.deltaTotal) for sample in trace.cpu_pressure], \
502 proc_tree, None)
503
504 # render avg10 cpu
505 if clip_visible (clip, chart_rect):
506 draw_chart (ctx, CPU_PRESSURE_AVG10_COLOR, False, chart_rect, \
507 [(sample.time, sample.avg10) for sample in trace.cpu_pressure], \
508 proc_tree, None)
509
510 pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
511
512 shift_x, shift_y = -20, 20
513 if (pos_x < off_x + 245):
514 shift_x, shift_y = 5, 40
515
516
517 label = "%d%%" % (max_sample_avg.avg10)
518 draw_text (ctx, label, CPU_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
519
520 curr_y = curr_y + 30 + bar_h
521
522 # render I/O pressure chart
523 if trace.io_pressure:
524 max_sample_avg = max (trace.io_pressure, key = lambda s: s.avg10)
525 max_sample_total = max (trace.io_pressure, key = lambda s: s.deltaTotal)
526 draw_legend_line(ctx, "avg10 I/O Pressure (max %d%%)" % (max_sample_avg.avg10), IO_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
527 draw_legend_box(ctx, "delta total I/O Pressure (max %d)" % (max_sample_total.deltaTotal), IO_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
528
529 # render delta total io
530 chart_rect = (off_x, curr_y+30, w, bar_h)
531 if clip_visible (clip, chart_rect):
532 draw_box_ticks (ctx, chart_rect, sec_w)
533 draw_annotations (ctx, proc_tree, trace.times, chart_rect)
534 draw_chart (ctx, IO_PRESSURE_TOTAL_COLOR, True, chart_rect, \
535 [(sample.time, sample.deltaTotal) for sample in trace.io_pressure], \
536 proc_tree, None)
537
538 # render avg10 io
539 if clip_visible (clip, chart_rect):
540 draw_chart (ctx, IO_PRESSURE_AVG10_COLOR, False, chart_rect, \
541 [(sample.time, sample.avg10) for sample in trace.io_pressure], \
542 proc_tree, None)
543
544 pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
545
546 shift_x, shift_y = -20, 20
547 if (pos_x < off_x + 245):
548 shift_x, shift_y = 5, 40
549
550
551 label = "%d%%" % (max_sample_avg.avg10)
552 draw_text (ctx, label, IO_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
553
554 curr_y = curr_y + 30 + bar_h
555
556 # render MEM pressure chart
557 if trace.mem_pressure:
558 max_sample_avg = max (trace.mem_pressure, key = lambda s: s.avg10)
559 max_sample_total = max (trace.mem_pressure, key = lambda s: s.deltaTotal)
560 draw_legend_line(ctx, "avg10 MEM Pressure (max %d%%)" % (max_sample_avg.avg10), MEM_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
561 draw_legend_box(ctx, "delta total MEM Pressure (max %d)" % (max_sample_total.deltaTotal), MEM_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
562
563 # render delta total mem
564 chart_rect = (off_x, curr_y+30, w, bar_h)
565 if clip_visible (clip, chart_rect):
566 draw_box_ticks (ctx, chart_rect, sec_w)
567 draw_annotations (ctx, proc_tree, trace.times, chart_rect)
568 draw_chart (ctx, MEM_PRESSURE_TOTAL_COLOR, True, chart_rect, \
569 [(sample.time, sample.deltaTotal) for sample in trace.mem_pressure], \
570 proc_tree, None)
571
572 # render avg10 mem
573 if clip_visible (clip, chart_rect):
574 draw_chart (ctx, MEM_PRESSURE_AVG10_COLOR, False, chart_rect, \
575 [(sample.time, sample.avg10) for sample in trace.mem_pressure], \
576 proc_tree, None)
577
578 pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
579
580 shift_x, shift_y = -20, 20
581 if (pos_x < off_x + 245):
582 shift_x, shift_y = 5, 40
583
584
585 label = "%d%%" % (max_sample_avg.avg10)
586 draw_text (ctx, label, MEM_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
587
588 curr_y = curr_y + 30 + bar_h
589
418 # render disk space usage 590 # render disk space usage
419 # 591 #
420 # Draws the amount of disk space used on each volume relative to the 592 # Draws the amount of disk space used on each volume relative to the
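
Note: the pressure charts above plot avg10 (a percentage) as a line and deltaTotal as a filled area. A worked example of the delta between successive cumulative PSI total= counters (values hypothetical; the reduced logs parsed later in parsing.py already store this delta per sample):

    # Cumulative total= values in microseconds, as /proc/pressure reports them
    totals = [0, 1500, 1500, 4200]
    delta_total = [b - a for a, b in zip(totals, totals[1:])]
    print(delta_total)  # [1500, 0, 2700]
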
@@ -496,8 +668,8 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
496 668
497 return curr_y 669 return curr_y
498 670
499def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w): 671def render_processes_chart(ctx, options, trace, curr_y, width, h, sec_w):
500 chart_rect = [off_x, curr_y+header_h, w, h - curr_y - 1 * off_y - header_h ] 672 chart_rect = [off_x, curr_y+header_h, width, h - curr_y - 1 * off_y - header_h ]
501 673
502 draw_legend_box (ctx, "Configure", \ 674 draw_legend_box (ctx, "Configure", \
503 TASK_COLOR_CONFIGURE, off_x , curr_y + 45, leg_s) 675 TASK_COLOR_CONFIGURE, off_x , curr_y + 45, leg_s)
@@ -522,8 +694,9 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
522 offset = trace.min or min(trace.start.keys()) 694 offset = trace.min or min(trace.start.keys())
523 for start in sorted(trace.start.keys()): 695 for start in sorted(trace.start.keys()):
524 for process in sorted(trace.start[start]): 696 for process in sorted(trace.start[start]):
697 elapsed_time = trace.processes[process][1] - start
525 if not options.app_options.show_all and \ 698 if not options.app_options.show_all and \
526 trace.processes[process][1] - start < options.app_options.mintime: 699 elapsed_time < options.app_options.mintime:
527 continue 700 continue
528 task = process.split(":")[1] 701 task = process.split(":")[1]
529 702
@@ -532,14 +705,23 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
532 #print(s) 705 #print(s)
533 706
534 x = chart_rect[0] + (start - offset) * sec_w 707 x = chart_rect[0] + (start - offset) * sec_w
535 w = ((trace.processes[process][1] - start) * sec_w) 708 w = elapsed_time * sec_w
709
710 def set_alfa(color, alfa):
711 clist = list(color)
712 clist[-1] = alfa
713 return tuple(clist)
536 714
537 #print("proc at %s %s %s %s" % (x, y, w, proc_h)) 715 #print("proc at %s %s %s %s" % (x, y, w, proc_h))
538 col = None 716 col = None
539 if task == "do_compile": 717 if task == "do_compile":
540 col = TASK_COLOR_COMPILE 718 col = TASK_COLOR_COMPILE
719 elif "do_compile" in task:
720 col = set_alfa(TASK_COLOR_COMPILE, 0.25)
541 elif task == "do_configure": 721 elif task == "do_configure":
542 col = TASK_COLOR_CONFIGURE 722 col = TASK_COLOR_CONFIGURE
723 elif "do_configure" in task:
724 col = set_alfa(TASK_COLOR_CONFIGURE, 0.25)
543 elif task == "do_install": 725 elif task == "do_install":
544 col = TASK_COLOR_INSTALL 726 col = TASK_COLOR_INSTALL
545 elif task == "do_populate_sysroot": 727 elif task == "do_populate_sysroot":
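
Note: the set_alfa helper above fades a base RGBA task colour for fuzzy matches (any task merely containing "do_compile" or "do_configure"). Its effect on a hypothetical colour:

    TASK_COLOR_COMPILE = (0.0, 1.0, 0.0, 1.0)  # hypothetical RGBA
    faded = (*TASK_COLOR_COMPILE[:-1], 0.25)   # keep RGB, override alpha only
    print(faded)  # (0.0, 1.0, 0.0, 0.25)
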
@@ -557,7 +739,10 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
557 draw_fill_rect(ctx, col, (x, y, w, proc_h)) 739 draw_fill_rect(ctx, col, (x, y, w, proc_h))
558 draw_rect(ctx, PROC_BORDER_COLOR, (x, y, w, proc_h)) 740 draw_rect(ctx, PROC_BORDER_COLOR, (x, y, w, proc_h))
559 741
560 draw_label_in_box(ctx, PROC_TEXT_COLOR, process, x, y + proc_h - 4, w, proc_h) 742 # Show elapsed time for each task
743 process = "%ds %s" % (elapsed_time, process)
744 draw_label_in_box(ctx, PROC_TEXT_COLOR, process, x, y + proc_h - 4, w, width)
745
561 y = y + proc_h 746 y = y + proc_h
562 747
563 return curr_y 748 return curr_y
@@ -698,7 +883,7 @@ def draw_processes_recursively(ctx, proc, proc_tree, y, proc_h, rect, clip) :
698 cmdString = proc.cmd 883 cmdString = proc.cmd
699 else: 884 else:
700 cmdString = '' 885 cmdString = ''
701 if (OPTIONS.show_pid or OPTIONS.show_all) and ipid is not 0: 886 if (OPTIONS.show_pid or OPTIONS.show_all) and ipid != 0:
702 cmdString = cmdString + " [" + str(ipid // 1000) + "]" 887 cmdString = cmdString + " [" + str(ipid // 1000) + "]"
703 if OPTIONS.show_all: 888 if OPTIONS.show_all:
704 if proc.args: 889 if proc.args:
@@ -796,7 +981,7 @@ class CumlSample:
796 if self.color is None: 981 if self.color is None:
797 i = self.next() % HSV_MAX_MOD 982 i = self.next() % HSV_MAX_MOD
798 h = 0.0 983 h = 0.0
799 if i is not 0: 984 if i != 0:
800 h = (1.0 * i) / HSV_MAX_MOD 985 h = (1.0 * i) / HSV_MAX_MOD
801 s = 0.5 986 s = 0.5
802 v = 1.0 987 v = 1.0
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py
index b42dac6b88..72a54c6ba5 100644
--- a/scripts/pybootchartgui/pybootchartgui/parsing.py
+++ b/scripts/pybootchartgui/pybootchartgui/parsing.py
@@ -48,7 +48,11 @@ class Trace:
48 self.filename = None 48 self.filename = None
49 self.parent_map = None 49 self.parent_map = None
50 self.mem_stats = [] 50 self.mem_stats = []
51 self.net_stats = []
51 self.monitor_disk = None 52 self.monitor_disk = None
53 self.cpu_pressure = []
54 self.io_pressure = []
55 self.mem_pressure = []
52 self.times = [] # Always empty, but expected by draw.py when drawing system charts. 56 self.times = [] # Always empty, but expected by draw.py when drawing system charts.
53 57
54 if len(paths): 58 if len(paths):
@@ -128,7 +132,7 @@ class Trace:
128 def compile(self, writer): 132 def compile(self, writer):
129 133
130 def find_parent_id_for(pid): 134 def find_parent_id_for(pid):
131 if pid is 0: 135 if pid == 0:
132 return 0 136 return 0
133 ppid = self.parent_map.get(pid) 137 ppid = self.parent_map.get(pid)
134 if ppid: 138 if ppid:
@@ -454,7 +458,7 @@ def _parse_proc_disk_stat_log(file):
454 not sda1, sda2 etc. The format of relevant lines should be: 458 not sda1, sda2 etc. The format of relevant lines should be:
455 {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq} 459 {major minor name rio rmerge rsect ruse wio wmerge wsect wuse running use aveq}
456 """ 460 """
457 disk_regex_re = re.compile ('^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') 461 disk_regex_re = re.compile (r'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
458 462
459 # this gets called an awful lot. 463 # this gets called an awful lot.
460 def is_relevant_line(linetokens): 464 def is_relevant_line(linetokens):
@@ -555,6 +559,44 @@ def _parse_monitor_disk_log(file):
555 return disk_stats 559 return disk_stats
556 560
557 561
562def _parse_reduced_net_log(file):
563 net_stats = {}
564 for time, lines in _parse_timed_blocks(file):
565
566 for line in lines:
567 parts = line.split()
568 iface = parts[0][:-1]
569 if iface not in net_stats:
570 net_stats[iface] = [NetSample(time, iface, int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4]))]
571 else:
572 net_stats[iface].append(NetSample(time, iface, int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4])))
573 return net_stats
574
575
576def _parse_pressure_logs(file, filename):
577 """
578 Parse file for "some" pressure with 'avg10', 'avg60', 'avg300' and delta total values
579 (in that order) stored directly on one line, for CPU, memory and IO, based on filename.
580 """
581 pressure_stats = []
582 if filename == "cpu.log":
583 SamplingClass = CPUPressureSample
584 elif filename == "memory.log":
585 SamplingClass = MemPressureSample
586 else:
587 SamplingClass = IOPressureSample
588 for time, lines in _parse_timed_blocks(file):
589 for line in lines:
590 if not line: continue
591 tokens = line.split()
592 avg10 = float(tokens[0])
593 avg60 = float(tokens[1])
594 avg300 = float(tokens[2])
595 delta = float(tokens[3])
596 pressure_stats.append(SamplingClass(time, avg10, avg60, avg300, delta))
597
598 return pressure_stats
599
558# if we boot the kernel with: initcall_debug printk.time=1 we can 600# if we boot the kernel with: initcall_debug printk.time=1 we can
559# get all manner of interesting data from the dmesg output 601# get all manner of interesting data from the dmesg output
560# We turn this into a pseudo-process tree: each event is 602# We turn this into a pseudo-process tree: each event is
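
Note: both parsers above expect one record per line inside the timed blocks: the reduced net log as "iface: rx_bytes tx_bytes rx_diff tx_diff" and the pressure logs as four numeric fields. A quick sanity check of those assumptions (sample values hypothetical):

    net_line = "eth0: 10240 2048 512 128"
    parts = net_line.split()
    iface, fields = parts[0][:-1], [int(p) for p in parts[1:]]
    assert iface == "eth0" and fields == [10240, 2048, 512, 128]

    psi_line = "0.00 0.05 0.10 12345"  # avg10 avg60 avg300 delta-total
    avg10, avg60, avg300, delta = (float(t) for t in psi_line.split())
    assert delta == 12345.0
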
@@ -568,8 +610,8 @@ def _parse_monitor_disk_log(file):
568# [ 0.039993] calling migration_init+0x0/0x6b @ 1 610# [ 0.039993] calling migration_init+0x0/0x6b @ 1
569# [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs 611# [ 0.039993] initcall migration_init+0x0/0x6b returned 1 after 0 usecs
570def _parse_dmesg(writer, file): 612def _parse_dmesg(writer, file):
571 timestamp_re = re.compile ("^\[\s*(\d+\.\d+)\s*]\s+(.*)$") 613 timestamp_re = re.compile (r"^\[\s*(\d+\.\d+)\s*]\s+(.*)$")
572 split_re = re.compile ("^(\S+)\s+([\S\+_-]+) (.*)$") 614 split_re = re.compile (r"^(\S+)\s+([\S\+_-]+) (.*)$")
573 processMap = {} 615 processMap = {}
574 idx = 0 616 idx = 0
575 inc = 1.0 / 1000000 617 inc = 1.0 / 1000000
@@ -614,7 +656,7 @@ def _parse_dmesg(writer, file):
614# print "foo: '%s' '%s' '%s'" % (type, func, rest) 656# print "foo: '%s' '%s' '%s'" % (type, func, rest)
615 if type == "calling": 657 if type == "calling":
616 ppid = kernel.pid 658 ppid = kernel.pid
617 p = re.match ("\@ (\d+)", rest) 659 p = re.match (r"\@ (\d+)", rest)
618 if p is not None: 660 if p is not None:
619 ppid = float (p.group(1)) // 1000 661 ppid = float (p.group(1)) // 1000
620# print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms) 662# print "match: '%s' ('%g') at '%s'" % (func, ppid, time_ms)
@@ -716,7 +758,7 @@ def get_num_cpus(headers):
716 cpu_model = headers.get("system.cpu") 758 cpu_model = headers.get("system.cpu")
717 if cpu_model is None: 759 if cpu_model is None:
718 return 1 760 return 1
719 mat = re.match(".*\\((\\d+)\\)", cpu_model) 761 mat = re.match(r".*\((\d+)\)", cpu_model)
720 if mat is None: 762 if mat is None:
721 return 1 763 return 1
722 return max (int(mat.group(1)), 1) 764 return max (int(mat.group(1)), 1)
@@ -741,6 +783,15 @@ def _do_parse(writer, state, filename, file):
741 state.cmdline = _parse_cmdline_log(writer, file) 783 state.cmdline = _parse_cmdline_log(writer, file)
742 elif name == "monitor_disk.log": 784 elif name == "monitor_disk.log":
743 state.monitor_disk = _parse_monitor_disk_log(file) 785 state.monitor_disk = _parse_monitor_disk_log(file)
786 elif name == "reduced_proc_net.log":
787 state.net_stats = _parse_reduced_net_log(file)
788 # pressure logs are in a subdirectory
789 elif name == "cpu.log":
790 state.cpu_pressure = _parse_pressure_logs(file, name)
791 elif name == "io.log":
792 state.io_pressure = _parse_pressure_logs(file, name)
793 elif name == "memory.log":
794 state.mem_pressure = _parse_pressure_logs(file, name)
744 elif not filename.endswith('.log'): 795 elif not filename.endswith('.log'):
745 _parse_bitbake_buildstats(writer, state, filename, file) 796 _parse_bitbake_buildstats(writer, state, filename, file)
746 t2 = time.process_time() 797 t2 = time.process_time()
diff --git a/scripts/pybootchartgui/pybootchartgui/samples.py b/scripts/pybootchartgui/pybootchartgui/samples.py
index 9fc309b3ab..7c92d2ce6a 100644
--- a/scripts/pybootchartgui/pybootchartgui/samples.py
+++ b/scripts/pybootchartgui/pybootchartgui/samples.py
@@ -37,6 +37,41 @@ class CPUSample:
37 return str(self.time) + "\t" + str(self.user) + "\t" + \ 37 return str(self.time) + "\t" + str(self.user) + "\t" + \
38 str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) 38 str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap)
39 39
40
41class NetSample:
42 def __init__(self, time, iface, received_bytes, transmitted_bytes, receive_diff, transmit_diff):
43 self.time = time
44 self.iface = iface
45 self.received_bytes = received_bytes
46 self.transmitted_bytes = transmitted_bytes
47 self.receive_diff = receive_diff
48 self.transmit_diff = transmit_diff
49
50class CPUPressureSample:
51 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
52 self.time = time
53 self.avg10 = avg10
54 self.avg60 = avg60
55 self.avg300 = avg300
56 self.deltaTotal = deltaTotal
57
58class IOPressureSample:
59 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
60 self.time = time
61 self.avg10 = avg10
62 self.avg60 = avg60
63 self.avg300 = avg300
64 self.deltaTotal = deltaTotal
65
66class MemPressureSample:
67 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
68 self.time = time
69 self.avg10 = avg10
70 self.avg60 = avg60
71 self.avg300 = avg300
72 self.deltaTotal = deltaTotal
73
74
40class MemSample: 75class MemSample:
41 used_values = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree',) 76 used_values = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree',)
42 77
diff --git a/scripts/pythondeps b/scripts/pythondeps
index be21dd84eb..48277ec28a 100755
--- a/scripts/pythondeps
+++ b/scripts/pythondeps
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Determine dependencies of python scripts or available python modules in a search path. 7# Determine dependencies of python scripts or available python modules in a search path.
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py
index 8c0fdb986a..9e01c09cb0 100755
--- a/scripts/relocate_sdk.py
+++ b/scripts/relocate_sdk.py
@@ -30,9 +30,16 @@ else:
30old_prefix = re.compile(b("##DEFAULT_INSTALL_DIR##")) 30old_prefix = re.compile(b("##DEFAULT_INSTALL_DIR##"))
31 31
32def get_arch(): 32def get_arch():
33 global endian_prefix
33 f.seek(0) 34 f.seek(0)
34 e_ident =f.read(16) 35 e_ident =f.read(16)
35 ei_mag0,ei_mag1_3,ei_class = struct.unpack("<B3sB11x", e_ident) 36 ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident)
37
38 # ei_data = 1 for little-endian, 2 for big-endian
39 if ei_data == 1:
40 endian_prefix = '<'
41 else:
42 endian_prefix = '>'
36 43
37 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0: 44 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0:
38 return 0 45 return 0
@@ -42,6 +49,34 @@ def get_arch():
42 elif ei_class == 2: 49 elif ei_class == 2:
43 return 64 50 return 64
44 51
52def get_dl_arch(dl_path):
53 try:
54 with open(dl_path, "r+b") as f:
55 e_ident =f.read(16)
56 except IOError:
57 exctype, ioex = sys.exc_info()[:2]
58 if ioex.errno == errno.ETXTBSY:
59 print("Could not open %s. File used by another process.\nPlease "\
60 "make sure you exit all processes that might use any SDK "\
61 "binaries." % e)
62 else:
63 print("Could not open %s: %s(%d)" % (e, ioex.strerror, ioex.errno))
64 sys.exit(-1)
65
66 ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident)
67
68 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0:
69 print("ERROR: unknow %s" % dl_path)
70 sys.exit(-1)
71
72 if ei_class == 1:
73 arch = 32
74 elif ei_class == 2:
75 arch = 64
76
77 return arch
78
79
45def parse_elf_header(): 80def parse_elf_header():
46 global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ 81 global e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\
47 e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx 82 e_ehsize, e_phentsize, e_phnum, e_shentsize, e_shnum, e_shstrndx
@@ -51,11 +86,11 @@ def parse_elf_header():
51 86
52 if arch == 32: 87 if arch == 32:
53 # 32bit 88 # 32bit
54 hdr_fmt = "<HHILLLIHHHHHH" 89 hdr_fmt = endian_prefix + "HHILLLIHHHHHH"
55 hdr_size = 52 90 hdr_size = 52
56 else: 91 else:
57 # 64bit 92 # 64bit
58 hdr_fmt = "<HHIQQQIHHHHHH" 93 hdr_fmt = endian_prefix + "HHIQQQIHHHHHH"
59 hdr_size = 64 94 hdr_size = 64
60 95
61 e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ 96 e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\
@@ -64,9 +99,9 @@ def parse_elf_header():
64 99
65def change_interpreter(elf_file_name): 100def change_interpreter(elf_file_name):
66 if arch == 32: 101 if arch == 32:
67 ph_fmt = "<IIIIIIII" 102 ph_fmt = endian_prefix + "IIIIIIII"
68 else: 103 else:
69 ph_fmt = "<IIQQQQQQ" 104 ph_fmt = endian_prefix + "IIQQQQQQ"
70 105
71 """ look for PT_INTERP section """ 106 """ look for PT_INTERP section """
72 for i in range(0,e_phnum): 107 for i in range(0,e_phnum):
@@ -97,25 +132,26 @@ def change_interpreter(elf_file_name):
97 if (len(new_dl_path) >= p_filesz): 132 if (len(new_dl_path) >= p_filesz):
98 print("ERROR: could not relocate %s, interp size = %i and %i is needed." \ 133 print("ERROR: could not relocate %s, interp size = %i and %i is needed." \
99 % (elf_file_name, p_memsz, len(new_dl_path) + 1)) 134 % (elf_file_name, p_memsz, len(new_dl_path) + 1))
100 break 135 return False
101 dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path)) 136 dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path))
102 f.seek(p_offset) 137 f.seek(p_offset)
103 f.write(dl_path) 138 f.write(dl_path)
104 break 139 break
140 return True
105 141
106def change_dl_sysdirs(elf_file_name): 142def change_dl_sysdirs(elf_file_name):
107 if arch == 32: 143 if arch == 32:
108 sh_fmt = "<IIIIIIIIII" 144 sh_fmt = endian_prefix + "IIIIIIIIII"
109 else: 145 else:
110 sh_fmt = "<IIQQQQIIQQ" 146 sh_fmt = endian_prefix + "IIQQQQIIQQ"
111 147
112 """ read section string table """ 148 """ read section string table """
113 f.seek(e_shoff + e_shstrndx * e_shentsize) 149 f.seek(e_shoff + e_shstrndx * e_shentsize)
114 sh_hdr = f.read(e_shentsize) 150 sh_hdr = f.read(e_shentsize)
115 if arch == 32: 151 if arch == 32:
116 sh_offset, sh_size = struct.unpack("<16xII16x", sh_hdr) 152 sh_offset, sh_size = struct.unpack(endian_prefix + "16xII16x", sh_hdr)
117 else: 153 else:
118 sh_offset, sh_size = struct.unpack("<24xQQ24x", sh_hdr) 154 sh_offset, sh_size = struct.unpack(endian_prefix + "24xQQ24x", sh_hdr)
119 155
120 f.seek(sh_offset) 156 f.seek(sh_offset)
121 sh_strtab = f.read(sh_size) 157 sh_strtab = f.read(sh_size)
@@ -215,6 +251,9 @@ else:
215 251
216executables_list = sys.argv[3:] 252executables_list = sys.argv[3:]
217 253
254dl_arch = get_dl_arch(new_dl_path)
255
256errors = False
218for e in executables_list: 257for e in executables_list:
219 perms = os.stat(e)[stat.ST_MODE] 258 perms = os.stat(e)[stat.ST_MODE]
220 if os.access(e, os.W_OK|os.R_OK): 259 if os.access(e, os.W_OK|os.R_OK):
@@ -238,9 +277,10 @@ for e in executables_list:
238 old_size = os.path.getsize(e) 277 old_size = os.path.getsize(e)
239 if old_size >= 64: 278 if old_size >= 64:
240 arch = get_arch() 279 arch = get_arch()
241 if arch: 280 if arch and arch == dl_arch:
242 parse_elf_header() 281 parse_elf_header()
243 change_interpreter(e) 282 if not change_interpreter(e):
283 errors = True
244 change_dl_sysdirs(e) 284 change_dl_sysdirs(e)
245 285
246 """ change permissions back """ 286 """ change permissions back """
@@ -253,3 +293,6 @@ for e in executables_list:
253 print("New file size for %s is different. Looks like a relocation error!", e) 293 print("New file size for %s is different. Looks like a relocation error!", e)
254 sys.exit(-1) 294 sys.exit(-1)
255 295
296if errors:
297 print("Relocation of one or more executables failed.")
298 sys.exit(-1)
diff --git a/scripts/resulttool b/scripts/resulttool
index fc282bda6c..66a6af9959 100755
--- a/scripts/resulttool
+++ b/scripts/resulttool
@@ -15,6 +15,9 @@
15# To report test report, execute the below 15# To report test report, execute the below
16# $ resulttool report <source_dir> 16# $ resulttool report <source_dir>
17# 17#
18# To create a unit test report in JUnit XML format, execute the below
19# $ resulttool junit <json_file>
20#
18# To perform regression file analysis, execute the below 21# To perform regression file analysis, execute the below
19# $ resulttool regression-file <base_result_file> <target_result_file> 22# $ resulttool regression-file <base_result_file> <target_result_file>
20# 23#
@@ -43,6 +46,7 @@ import resulttool.regression
43import resulttool.report 46import resulttool.report
44import resulttool.manualexecution 47import resulttool.manualexecution
45import resulttool.log 48import resulttool.log
49import resulttool.junit
46logger = scriptutils.logger_create('resulttool') 50logger = scriptutils.logger_create('resulttool')
47 51
48def main(): 52def main():
@@ -61,6 +65,7 @@ def main():
61 resulttool.regression.register_commands(subparsers) 65 resulttool.regression.register_commands(subparsers)
62 resulttool.report.register_commands(subparsers) 66 resulttool.report.register_commands(subparsers)
63 resulttool.log.register_commands(subparsers) 67 resulttool.log.register_commands(subparsers)
68 resulttool.junit.register_commands(subparsers)
64 69
65 args = parser.parse_args() 70 args = parser.parse_args()
66 if args.debug: 71 if args.debug:
diff --git a/scripts/rpm2cpio.sh b/scripts/rpm2cpio.sh
index 7cd771bbe7..8199b43784 100755
--- a/scripts/rpm2cpio.sh
+++ b/scripts/rpm2cpio.sh
@@ -7,7 +7,7 @@ fatal() {
7} 7}
8 8
9pkg="$1" 9pkg="$1"
10[ -n "$pkg" -a -e "$pkg" ] || 10[ -n "$pkg" ] && [ -e "$pkg" ] ||
11 fatal "No package supplied" 11 fatal "No package supplied"
12 12
13_dd() { 13_dd() {
@@ -16,14 +16,23 @@ _dd() {
16} 16}
17 17
18calcsize() { 18calcsize() {
19
20 case "$(_dd $1 bs=4 count=1 | tr -d '\0')" in
21 "$(printf '\216\255\350')"*) ;; # '\x8e\xad\xe8'
22 *) fatal "File doesn't look like rpm: $pkg" ;;
23 esac
24
19 offset=$(($1 + 8)) 25 offset=$(($1 + 8))
20 26
21 local i b b0 b1 b2 b3 b4 b5 b6 b7 27 local i b b0 b1 b2 b3 b4 b5 b6 b7
22 28
23 i=0 29 i=0
24 while [ $i -lt 8 ]; do 30 while [ $i -lt 8 ]; do
25 b=$(_dd $(($offset + $i)) bs=1 count=1; echo X) 31 # append . so a trailing \n is not lost
26 b=${b%X} 32 # strip \0, which would otherwise be dropped with a warning
33 b="$(_dd $(($offset + $i)) bs=1 count=1 | tr -d '\0' ; echo .)"
34 b=${b%.} # strip . again
35
27 [ -z "$b" ] && 36 [ -z "$b" ] &&
28 b="0" || 37 b="0" ||
29 b="$(exec printf '%u\n' "'$b")" 38 b="$(exec printf '%u\n' "'$b")"
@@ -35,7 +44,7 @@ calcsize() {
35 offset=$(($offset + $rsize)) 44 offset=$(($offset + $rsize))
36} 45}
37 46
38case "$(_dd 0 bs=8 count=1)" in 47case "$(_dd 0 bs=4 count=1 | tr -d '\0')" in
39 "$(printf '\355\253\356\333')"*) ;; # '\xed\xab\xee\xdb' 48 "$(printf '\355\253\356\333')"*) ;; # '\xed\xab\xee\xdb'
40 *) fatal "File doesn't look like rpm: $pkg" ;; 49 *) fatal "File doesn't look like rpm: $pkg" ;;
41esac 50esac
@@ -46,10 +55,11 @@ sigsize=$rsize
46calcsize $(($offset + (8 - ($sigsize % 8)) % 8)) 55calcsize $(($offset + (8 - ($sigsize % 8)) % 8))
47hdrsize=$rsize 56hdrsize=$rsize
48 57
49case "$(_dd $offset bs=3 count=1)" in 58case "$(_dd $offset bs=2 count=1 | tr -d '\0')" in
50 "$(printf '\102\132')"*) _dd $offset | bunzip2 ;; # '\x42\x5a' 59 "$(printf '\102\132')") _dd $offset | bunzip2 ;; # '\x42\x5a'
51 "$(printf '\037\213')"*) _dd $offset | gunzip ;; # '\x1f\x8b' 60 "$(printf '\037\213')") _dd $offset | gunzip ;; # '\x1f\x8b'
52 "$(printf '\375\067')"*) _dd $offset | xzcat ;; # '\xfd\x37' 61 "$(printf '\375\067')") _dd $offset | xzcat ;; # '\xfd\x37'
53 "$(printf '\135\000')"*) _dd $offset | unlzma ;; # '\x5d\x00' 62 "$(printf '\135')") _dd $offset | unlzma ;; # '\x5d\x00'
54 *) fatal "Unrecognized rpm file: $pkg" ;; 63 "$(printf '\050\265')") _dd $offset | unzstd ;; # '\x28\xb5'
64 *) fatal "Unrecognized payload compression format in rpm file: $pkg" ;;
55esac 65esac
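
Note: the payload dispatch above keys off the first compressed bytes at the computed offset. The same magic table, mirrored in Python for a pre-flight check (helper name hypothetical):

    PAYLOAD_MAGIC = {
        b"\x42\x5a": "bzip2",
        b"\x1f\x8b": "gzip",
        b"\xfd\x37": "xz",
        b"\x5d":     "lzma",   # single-byte check, as in the script
        b"\x28\xb5": "zstd",
    }

    def payload_format(head):
        # Longest magic first so the one-byte lzma check cannot shadow others
        for magic in sorted(PAYLOAD_MAGIC, key=len, reverse=True):
            if head.startswith(magic):
                return PAYLOAD_MAGIC[magic]
        return None

    print(payload_format(b"\x28\xb5\x2f\xfd"))  # zstd
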
diff --git a/scripts/runqemu b/scripts/runqemu
index efb98ab9e0..3d77046972 100755
--- a/scripts/runqemu
+++ b/scripts/runqemu
@@ -66,6 +66,7 @@ of the following environment variables (in any order):
66 MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified) 66 MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified)
67 Simplified QEMU command-line options can be passed with: 67 Simplified QEMU command-line options can be passed with:
68 nographic - disable video console 68 nographic - disable video console
69 nonetwork - disable network connectivity
69 novga - Disable VGA emulation completely 70 novga - Disable VGA emulation completely
70 sdl - choose the SDL UI frontend 71 sdl - choose the SDL UI frontend
71 gtk - choose the Gtk UI frontend 72 gtk - choose the Gtk UI frontend
@@ -73,15 +74,17 @@ of the following environment variables (in any order):
73 gl-es - enable virgl-based GL acceleration, using OpenGL ES (also needs gtk or sdl options) 74 gl-es - enable virgl-based GL acceleration, using OpenGL ES (also needs gtk or sdl options)
74 egl-headless - enable headless EGL output; use vnc (via publicvnc option) or spice to see it 75 egl-headless - enable headless EGL output; use vnc (via publicvnc option) or spice to see it
75 (hint: if /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create 76 (hint: if /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create
76 one sutable for mesa llvmpipe sofware renderer) 77 one suitable for mesa llvmpipe software renderer)
77 serial - enable a serial console on /dev/ttyS0 78 serial - enable a serial console on /dev/ttyS0
78 serialstdio - enable a serial console on the console (regardless of graphics mode) 79 serialstdio - enable a serial console on the console (regardless of graphics mode)
79 slirp - enable user networking, no root privileges is required 80 slirp - enable user networking, no root privilege is required
80 snapshot - don't write changes to back to images 81 snapshot - don't write changes back to images
81 kvm - enable KVM when running x86/x86_64 (VT-capable CPU required) 82 kvm - enable KVM when running x86/x86_64 (VT-capable CPU required)
82 kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required) 83 kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required)
83 publicvnc - enable a VNC server open to all hosts 84 publicvnc - enable a VNC server open to all hosts
84 audio - enable audio 85 audio - enable audio
86 guestagent - enable guest agent communication
87 qmp=<path> - create a QMP socket (defaults to unix:qmp.sock if unspecified)
85 [*/]ovmf* - OVMF firmware file or base name for booting with UEFI 88 [*/]ovmf* - OVMF firmware file or base name for booting with UEFI
86 tcpserial=<port> - specify tcp serial port number 89 tcpserial=<port> - specify tcp serial port number
87 qemuparams=<xyz> - specify custom parameters to QEMU 90 qemuparams=<xyz> - specify custom parameters to QEMU
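
Note: the new qmp= option above exposes a QMP control socket (unix:qmp.sock unless a path is given). A hedged sketch of greeting it from the host, assuming the socket sits in the launch directory:

    import json
    import socket

    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.connect("qmp.sock")
    f = s.makefile()
    greeting = json.loads(f.readline())           # QEMU sends a banner first
    s.sendall(b'{"execute": "qmp_capabilities"}\n')  # negotiate before commands
    print(greeting["QMP"]["version"])
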
@@ -116,10 +119,10 @@ def check_tun():
116 if not os.access(dev_tun, os.W_OK): 119 if not os.access(dev_tun, os.W_OK):
117 raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun)) 120 raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun))
118 121
119def get_first_file(cmds): 122def get_first_file(globs):
120 """Return first file found in wildcard cmds""" 123 """Return first file found in wildcard globs"""
121 for cmd in cmds: 124 for g in globs:
122 all_files = glob.glob(cmd) 125 all_files = glob.glob(g)
123 if all_files: 126 if all_files:
124 for f in all_files: 127 for f in all_files:
125 if not os.path.isdir(f): 128 if not os.path.isdir(f):
@@ -177,11 +180,13 @@ class BaseConfig(object):
177 self.serialconsole = False 180 self.serialconsole = False
178 self.serialstdio = False 181 self.serialstdio = False
179 self.nographic = False 182 self.nographic = False
183 self.nonetwork = False
180 self.sdl = False 184 self.sdl = False
181 self.gtk = False 185 self.gtk = False
182 self.gl = False 186 self.gl = False
183 self.gl_es = False 187 self.gl_es = False
184 self.egl_headless = False 188 self.egl_headless = False
189 self.publicvnc = False
185 self.novga = False 190 self.novga = False
186 self.cleantap = False 191 self.cleantap = False
187 self.saved_stty = '' 192 self.saved_stty = ''
@@ -194,12 +199,14 @@ class BaseConfig(object):
194 self.snapshot = False 199 self.snapshot = False
195 self.wictypes = ('wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx") 200 self.wictypes = ('wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx")
196 self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs', 201 self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs',
197 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz') 202 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz',
203 'squashfs', 'squashfs-xz', 'squashfs-lzo',
204 'squashfs-lz4', 'squashfs-zst')
198 self.vmtypes = ('hddimg', 'iso') 205 self.vmtypes = ('hddimg', 'iso')
199 self.fsinfo = {} 206 self.fsinfo = {}
200 self.network_device = "-device e1000,netdev=net0,mac=@MAC@" 207 self.network_device = "-device e1000,netdev=net0,mac=@MAC@"
201 self.cmdline_ip_slirp = "ip=dhcp" 208 self.cmdline_ip_slirp = "ip=dhcp"
202 self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0" 209 self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8 net.ifnames=0"
203 # Use different mac section for tap and slirp to avoid 210 # Use different mac section for tap and slirp to avoid
204 # conflicts, e.g., when one is running with tap, the other is 211 # conflicts, e.g., when one is running with tap, the other is
205 # running with slirp. 212 # running with slirp.
@@ -209,11 +216,15 @@ class BaseConfig(object):
209 self.mac_tap = "52:54:00:12:34:" 216 self.mac_tap = "52:54:00:12:34:"
210 self.mac_slirp = "52:54:00:12:35:" 217 self.mac_slirp = "52:54:00:12:35:"
211 # pid of the actual qemu process 218 # pid of the actual qemu process
212 self.qemupid = None 219 self.qemu_environ = os.environ.copy()
220 self.qemuprocess = None
213 # avoid cleanup twice 221 # avoid cleanup twice
214 self.cleaned = False 222 self.cleaned = False
215 # Files to cleanup after run 223 # Files to cleanup after run
216 self.cleanup_files = [] 224 self.cleanup_files = []
225 self.qmp = None
226 self.guest_agent = False
227 self.guest_agent_sockpath = '/tmp/qga.sock'
217 228
218 def acquire_taplock(self, error=True): 229 def acquire_taplock(self, error=True):
219 logger.debug("Acquiring lockfile %s..." % self.taplock) 230 logger.debug("Acquiring lockfile %s..." % self.taplock)
@@ -352,21 +363,21 @@ class BaseConfig(object):
352 def check_arg_path(self, p): 363 def check_arg_path(self, p):
353 """ 364 """
354 - Check whether it is <image>.qemuboot.conf or contains <image>.qemuboot.conf 365 - Check whether it is <image>.qemuboot.conf or contains <image>.qemuboot.conf
355 - Check whether is a kernel file 366 - Check whether it is a kernel file
356 - Check whether is a image file 367 - Check whether it is an image file
357 - Check whether it is a nfs dir 368 - Check whether it is an NFS dir
358 - Check whether it is a OVMF flash file 369 - Check whether it is an OVMF flash file
359 """ 370 """
360 if p.endswith('.qemuboot.conf'): 371 if p.endswith('.qemuboot.conf'):
361 self.qemuboot = p 372 self.qemuboot = p
362 self.qbconfload = True 373 self.qbconfload = True
363 elif re.search('\.bin$', p) or re.search('bzImage', p) or \ 374 elif re.search('\\.bin$', p) or re.search('bzImage', p) or \
364 re.search('zImage', p) or re.search('vmlinux', p) or \ 375 re.search('zImage', p) or re.search('vmlinux', p) or \
365 re.search('fitImage', p) or re.search('uImage', p): 376 re.search('fitImage', p) or re.search('uImage', p):
366 self.kernel = p 377 self.kernel = p
367 elif os.path.exists(p) and (not os.path.isdir(p)) and '-image-' in os.path.basename(p): 378 elif os.path.isfile(p) and ('-image-' in os.path.basename(p) or '.rootfs.' in os.path.basename(p)):
368 self.rootfs = p 379 self.rootfs = p
369 # Check filename against self.fstypes can hanlde <file>.cpio.gz, 380 # Check filename against self.fstypes can handle <file>.cpio.gz,
370 # otherwise, its type would be "gz", which is incorrect. 381 # otherwise, its type would be "gz", which is incorrect.
371 fst = "" 382 fst = ""
372 for t in self.fstypes: 383 for t in self.fstypes:
@@ -374,18 +385,24 @@ class BaseConfig(object):
374 fst = t 385 fst = t
375 break 386 break
376 if not fst: 387 if not fst:
377 m = re.search('.*\.(.*)$', self.rootfs) 388 m = re.search('.*\\.(.*)$', self.rootfs)
378 if m: 389 if m:
379 fst = m.group(1) 390 fst = m.group(1)
380 if fst: 391 if fst:
381 self.check_arg_fstype(fst) 392 self.check_arg_fstype(fst)
382 qb = re.sub('\.' + fst + "$", '', self.rootfs) 393 qb = re.sub('\\.' + fst + "$", '.qemuboot.conf', self.rootfs)
383 qb = '%s%s' % (re.sub('\.rootfs$', '', qb), '.qemuboot.conf')
384 if os.path.exists(qb): 394 if os.path.exists(qb):
385 self.qemuboot = qb 395 self.qemuboot = qb
386 self.qbconfload = True 396 self.qbconfload = True
387 else: 397 else:
388 logger.warning("%s doesn't exist" % qb) 398 logger.warning("%s doesn't exist, will try to remove '.rootfs' from filename" % qb)
399 # Try to remove .rootfs (IMAGE_NAME_SUFFIX) as well
400 qb = re.sub('\\.rootfs.qemuboot.conf$', '.qemuboot.conf', qb)
401 if os.path.exists(qb):
402 self.qemuboot = qb
403 self.qbconfload = True
404 else:
405 logger.warning("%s doesn't exist" % qb)
389 else: 406 else:
390 raise RunQemuError("Can't find FSTYPE from: %s" % p) 407 raise RunQemuError("Can't find FSTYPE from: %s" % p)
391 408
@@ -419,6 +436,7 @@ class BaseConfig(object):
419 # are there other scenarios in which we need to support being 436 # are there other scenarios in which we need to support being
420 # invoked by bitbake? 437 # invoked by bitbake?
421 deploy = self.get('DEPLOY_DIR_IMAGE') 438 deploy = self.get('DEPLOY_DIR_IMAGE')
439 image_link_name = self.get('IMAGE_LINK_NAME')
422 bbchild = deploy and self.get('OE_TMPDIR') 440 bbchild = deploy and self.get('OE_TMPDIR')
423 if bbchild: 441 if bbchild:
424 self.set_machine_deploy_dir(arg, deploy) 442 self.set_machine_deploy_dir(arg, deploy)
@@ -443,23 +461,27 @@ class BaseConfig(object):
443 else: 461 else:
444 logger.error("%s not a directory valid DEPLOY_DIR_IMAGE" % deploy_dir_image) 462 logger.error("%s not a directory valid DEPLOY_DIR_IMAGE" % deploy_dir_image)
445 self.set("MACHINE", arg) 463 self.set("MACHINE", arg)
446 464 if not image_link_name:
447 def set_dri_path(self): 465 s = re.search('^IMAGE_LINK_NAME="(.*)"', self.bitbake_e, re.M)
448 # As runqemu can be run within bitbake (when using testimage, for example), 466 if s:
449 # we need to ensure that we run host pkg-config, and that it does not 467 image_link_name = s.group(1)
450 # get mis-directed to native build paths set by bitbake. 468 self.set("IMAGE_LINK_NAME", image_link_name)
451 try: 469 logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
452 del os.environ['PKG_CONFIG_PATH'] 470
453 del os.environ['PKG_CONFIG_DIR'] 471 def set_mesa_paths(self):
454 del os.environ['PKG_CONFIG_LIBDIR'] 472 drivers_path = os.path.join(self.bindir_native, '../lib/dri')
455 del os.environ['PKG_CONFIG_SYSROOT_DIR'] 473 gbm_path = os.path.join(self.bindir_native, '../lib/gbm')
456 except KeyError: 474 if not os.path.exists(drivers_path) or not os.listdir(drivers_path) \
457 pass 475 or not os.path.exists(gbm_path) or not os.listdir(gbm_path):
458 try: 476 raise RunQemuError("""
459 dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True) 477qemu has been built without opengl support and accelerated graphics support is not available.
460 except subprocess.CalledProcessError as e: 478To enable it, add:
461 raise RunQemuError("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.") 479DISTRO_FEATURES_NATIVE:append = " opengl"
462 os.environ['LIBGL_DRIVERS_PATH'] = dripath.decode('utf-8').strip() 480DISTRO_FEATURES_NATIVESDK:append = " opengl"
481to your build configuration.
482""")
483 self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path
484 self.qemu_environ['GBM_BACKENDS_PATH'] = gbm_path
463 485
464 def check_args(self): 486 def check_args(self):
465 for debug in ("-d", "--debug"): 487 for debug in ("-d", "--debug"):
@@ -473,7 +495,8 @@ class BaseConfig(object):
473 sys.argv.remove(quiet) 495 sys.argv.remove(quiet)
474 496
475 if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]: 497 if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]:
476 os.environ['SDL_RENDER_DRIVER'] = 'software' 498 self.qemu_environ['SDL_RENDER_DRIVER'] = 'software'
499 self.qemu_environ['SDL_FRAMEBUFFER_ACCELERATION'] = 'false'
477 500
478 unknown_arg = "" 501 unknown_arg = ""
479 for arg in sys.argv[1:]: 502 for arg in sys.argv[1:]:
@@ -481,13 +504,15 @@ class BaseConfig(object):
481 self.check_arg_fstype(arg) 504 self.check_arg_fstype(arg)
482 elif arg == 'nographic': 505 elif arg == 'nographic':
483 self.nographic = True 506 self.nographic = True
507 elif arg == "nonetwork":
508 self.nonetwork = True
484 elif arg == 'sdl': 509 elif arg == 'sdl':
485 self.sdl = True 510 self.sdl = True
486 elif arg == 'gtk': 511 elif arg == 'gtk':
487 self.gtk = True 512 self.gtk = True
488 elif arg == 'gl': 513 elif arg == 'gl':
489 self.gl = True 514 self.gl = True
490 elif 'gl-es' in sys.argv[1:]: 515 elif arg == 'gl-es':
491 self.gl_es = True 516 self.gl_es = True
492 elif arg == 'egl-headless': 517 elif arg == 'egl-headless':
493 self.egl_headless = True 518 self.egl_headless = True
@@ -512,7 +537,16 @@ class BaseConfig(object):
512 elif arg == 'snapshot': 537 elif arg == 'snapshot':
513 self.snapshot = True 538 self.snapshot = True
514 elif arg == 'publicvnc': 539 elif arg == 'publicvnc':
540 self.publicvnc = True
515 self.qemu_opt_script += ' -vnc :0' 541 self.qemu_opt_script += ' -vnc :0'
542 elif arg == 'guestagent':
543 self.guest_agent = True
544 elif arg == "qmp":
545 self.qmp = "unix:qmp.sock"
546 elif arg.startswith("qmp="):
547 self.qmp = arg[len('qmp='):]
548 elif arg.startswith('guestagent-sockpath='):
549 self.guest_agent_sockpath = '%s' % arg[len('guestagent-sockpath='):]
516 elif arg.startswith('tcpserial='): 550 elif arg.startswith('tcpserial='):
517 self.tcpserial_portnum = '%s' % arg[len('tcpserial='):] 551 self.tcpserial_portnum = '%s' % arg[len('tcpserial='):]
518 elif arg.startswith('qemuparams='): 552 elif arg.startswith('qemuparams='):
@@ -544,11 +578,18 @@ class BaseConfig(object):
544 self.check_arg_machine(unknown_arg) 578 self.check_arg_machine(unknown_arg)
545 579
546 if not (self.get('DEPLOY_DIR_IMAGE') or self.qbconfload): 580 if not (self.get('DEPLOY_DIR_IMAGE') or self.qbconfload):
547 self.load_bitbake_env() 581 self.load_bitbake_env(target=self.rootfs)
548 s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M) 582 s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M)
549 if s: 583 if s:
550 self.set("DEPLOY_DIR_IMAGE", s.group(1)) 584 self.set("DEPLOY_DIR_IMAGE", s.group(1))
551 585
586 if not self.get('IMAGE_LINK_NAME') and self.rootfs:
587 s = re.search('^IMAGE_LINK_NAME="(.*)"', self.bitbake_e, re.M)
588 if s:
589 image_link_name = s.group(1)
590 self.set("IMAGE_LINK_NAME", image_link_name)
591 logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
592
552 def check_kvm(self): 593 def check_kvm(self):
553 """Check kvm and kvm-host""" 594 """Check kvm and kvm-host"""
554 if not (self.kvm_enabled or self.vhost_enabled): 595 if not (self.kvm_enabled or self.vhost_enabled):
@@ -578,11 +619,6 @@ class BaseConfig(object):
578 619
579 if os.access(dev_kvm, os.W_OK|os.R_OK): 620 if os.access(dev_kvm, os.W_OK|os.R_OK):
580 self.qemu_opt_script += ' -enable-kvm' 621 self.qemu_opt_script += ' -enable-kvm'
581 if self.get('MACHINE') == "qemux86":
582 # Workaround for broken APIC window on pre 4.15 host kernels which causes boot hangs
583 # See YOCTO #12301
584 # On 64 bit we use x2apic
585 self.kernel_cmdline_script += " clocksource=kvm-clock hpet=disable noapic nolapic"
586 else: 622 else:
587 logger.error("You have no read or write permission on /dev/kvm.") 623 logger.error("You have no read or write permission on /dev/kvm.")
588 logger.error("Please change the ownership of this file as described at:") 624 logger.error("Please change the ownership of this file as described at:")
@@ -623,10 +659,10 @@ class BaseConfig(object):
623 elif fsflag == 'kernel-in-fs': 659 elif fsflag == 'kernel-in-fs':
624 wic_fs = False 660 wic_fs = False
625 else: 661 else:
626 logger.warn('Unknown flag "%s:%s" in QB_FSINFO', fstype, fsflag) 662 logger.warning('Unknown flag "%s:%s" in QB_FSINFO', fstype, fsflag)
627 continue 663 continue
628 else: 664 else:
629 logger.warn('QB_FSINFO is not supported for image type "%s"', fstype) 665 logger.warning('QB_FSINFO is not supported for image type "%s"', fstype)
630 continue 666 continue
631 667
632 if fstype in self.fsinfo: 668 if fstype in self.fsinfo:
@@ -659,16 +695,16 @@ class BaseConfig(object):
659 695
660 if self.rootfs and not os.path.exists(self.rootfs): 696 if self.rootfs and not os.path.exists(self.rootfs):
661 # Lazy rootfs 697 # Lazy rootfs
662 self.rootfs = "%s/%s-%s.%s" % (self.get('DEPLOY_DIR_IMAGE'), 698 self.rootfs = "%s/%s.%s" % (self.get('DEPLOY_DIR_IMAGE'),
663 self.rootfs, self.get('MACHINE'), 699 self.get('IMAGE_LINK_NAME'),
664 self.fstype) 700 self.fstype)
665 elif not self.rootfs: 701 elif not self.rootfs:
666 cmd_name = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_NAME'), self.fstype) 702 glob_name = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_NAME'), self.fstype)
667 cmd_link = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME'), self.fstype) 703 glob_link = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME'), self.fstype)
668 cmds = (cmd_name, cmd_link) 704 globs = (glob_name, glob_link)
669 self.rootfs = get_first_file(cmds) 705 self.rootfs = get_first_file(globs)
670 if not self.rootfs: 706 if not self.rootfs:
671 raise RunQemuError("Failed to find rootfs: %s or %s" % cmds) 707 raise RunQemuError("Failed to find rootfs: %s or %s" % globs)
672 708
673 if not os.path.exists(self.rootfs): 709 if not os.path.exists(self.rootfs):
674 raise RunQemuError("Can't find rootfs: %s" % self.rootfs) 710 raise RunQemuError("Can't find rootfs: %s" % self.rootfs)
@@ -728,10 +764,10 @@ class BaseConfig(object):
728 kernel_match_name = "%s/%s" % (deploy_dir_image, kernel_name) 764 kernel_match_name = "%s/%s" % (deploy_dir_image, kernel_name)
729 kernel_match_link = "%s/%s" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE')) 765 kernel_match_link = "%s/%s" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE'))
730 kernel_startswith = "%s/%s*" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE')) 766 kernel_startswith = "%s/%s*" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE'))
731 cmds = (kernel_match_name, kernel_match_link, kernel_startswith) 767 globs = (kernel_match_name, kernel_match_link, kernel_startswith)
732 self.kernel = get_first_file(cmds) 768 self.kernel = get_first_file(globs)
733 if not self.kernel: 769 if not self.kernel:
734 raise RunQemuError('KERNEL not found: %s, %s or %s' % cmds) 770 raise RunQemuError('KERNEL not found: %s, %s or %s' % globs)
735 771
736 if not os.path.exists(self.kernel): 772 if not os.path.exists(self.kernel):
737 raise RunQemuError("KERNEL %s not found" % self.kernel) 773 raise RunQemuError("KERNEL %s not found" % self.kernel)
@@ -748,13 +784,13 @@ class BaseConfig(object):
748 dtb = self.get('QB_DTB') 784 dtb = self.get('QB_DTB')
749 if dtb: 785 if dtb:
750 deploy_dir_image = self.get('DEPLOY_DIR_IMAGE') 786 deploy_dir_image = self.get('DEPLOY_DIR_IMAGE')
751 cmd_match = "%s/%s" % (deploy_dir_image, dtb) 787 glob_match = "%s/%s" % (deploy_dir_image, dtb)
752 cmd_startswith = "%s/%s*" % (deploy_dir_image, dtb) 788 glob_startswith = "%s/%s*" % (deploy_dir_image, dtb)
753 cmd_wild = "%s/*.dtb" % deploy_dir_image 789 glob_wild = "%s/*.dtb" % deploy_dir_image
754 cmds = (cmd_match, cmd_startswith, cmd_wild) 790 globs = (glob_match, glob_startswith, glob_wild)
755 self.dtb = get_first_file(cmds) 791 self.dtb = get_first_file(globs)
756 if not os.path.exists(self.dtb): 792 if not os.path.exists(self.dtb):
757 raise RunQemuError('DTB not found: %s, %s or %s' % cmds) 793 raise RunQemuError('DTB not found: %s, %s or %s' % globs)
758 794
759 def check_bios(self): 795 def check_bios(self):
760 """Check and set bios""" 796 """Check and set bios"""
@@ -805,7 +841,7 @@ class BaseConfig(object):
805 self.set('QB_MEM', qb_mem) 841 self.set('QB_MEM', qb_mem)
806 842
807 mach = self.get('MACHINE') 843 mach = self.get('MACHINE')
808 if not mach.startswith('qemumips'): 844 if not mach.startswith(('qemumips', 'qemux86', 'qemuloongarch64')):
809 self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M' 845 self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M'
810 846
811 self.qemu_opt_script += ' %s' % self.get('QB_MEM') 847 self.qemu_opt_script += ' %s' % self.get('QB_MEM')
@@ -817,11 +853,11 @@ class BaseConfig(object):
817 if self.get('QB_TCPSERIAL_OPT'): 853 if self.get('QB_TCPSERIAL_OPT'):
818 self.qemu_opt_script += ' ' + self.get('QB_TCPSERIAL_OPT').replace('@PORT@', port) 854 self.qemu_opt_script += ' ' + self.get('QB_TCPSERIAL_OPT').replace('@PORT@', port)
819 else: 855 else:
820 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s' % port 856 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s,nodelay=on' % port
821 857
822 if len(ports) > 1: 858 if len(ports) > 1:
823 for port in ports[1:]: 859 for port in ports[1:]:
824 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s' % port 860 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s,nodelay=on' % port
825 861
826 def check_and_set(self): 862 def check_and_set(self):
827 """Check configs sanity and set when needed""" 863 """Check configs sanity and set when needed"""
@@ -864,8 +900,10 @@ class BaseConfig(object):
864 machine = self.get('MACHINE') 900 machine = self.get('MACHINE')
865 if not machine: 901 if not machine:
866 machine = os.path.basename(deploy_dir_image) 902 machine = os.path.basename(deploy_dir_image)
867 self.qemuboot = "%s/%s-%s.qemuboot.conf" % (deploy_dir_image, 903 if not self.get('IMAGE_LINK_NAME'):
868 self.rootfs, machine) 904 raise RunQemuError("IMAGE_LINK_NAME wasn't set to find corresponding .qemuboot.conf file")
905 self.qemuboot = "%s/%s.qemuboot.conf" % (deploy_dir_image,
906 self.get('IMAGE_LINK_NAME'))
869 else: 907 else:
870 cmd = 'ls -t %s/*.qemuboot.conf' % deploy_dir_image 908 cmd = 'ls -t %s/*.qemuboot.conf' % deploy_dir_image
871 logger.debug('Running %s...' % cmd) 909 logger.debug('Running %s...' % cmd)
@@ -986,19 +1024,16 @@ class BaseConfig(object):
986 if self.slirp_enabled: 1024 if self.slirp_enabled:
987 self.nfs_server = '10.0.2.2' 1025 self.nfs_server = '10.0.2.2'
988 else: 1026 else:
989 self.nfs_server = '192.168.7.1' 1027 self.nfs_server = '192.168.7.@GATEWAY@'
990
991 # Figure out a new nfs_instance to allow multiple qemus running.
992 ps = subprocess.check_output(("ps", "auxww")).decode('utf-8')
993 pattern = '/bin/unfsd .* -i .*\.pid -e .*/exports([0-9]+) '
994 all_instances = re.findall(pattern, ps, re.M)
995 if all_instances:
996 all_instances.sort(key=int)
997 self.nfs_instance = int(all_instances.pop()) + 1
998 1028
999 nfsd_port = 3049 + 2 * self.nfs_instance 1029 nfsd_port = 3048 + self.nfs_instance
1000 mountd_port = 3048 + 2 * self.nfs_instance 1030 lockdir = "/tmp/qemu-port-locks"
1031 self.make_lock_dir(lockdir)
1032 while not self.check_free_port('localhost', nfsd_port, lockdir):
1033 self.nfs_instance += 1
1034 nfsd_port += 1
1001 1035
1036 mountd_port = nfsd_port
1002 # Export vars for runqemu-export-rootfs 1037 # Export vars for runqemu-export-rootfs
1003 export_dict = { 1038 export_dict = {
1004 'NFS_INSTANCE': self.nfs_instance, 1039 'NFS_INSTANCE': self.nfs_instance,
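Rather than parsing ps output for running unfsd instances, a free port is now probed directly. check_free_port() lives elsewhere in runqemu; stripped of its lock-file bookkeeping, the probe amounts to something like:

    import socket

    def port_is_free(host, port):
        # True when nothing accepts connections on host:port
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            return s.connect_ex((host, port)) != 0

    nfsd_port = 3048 + 1          # 3048 + nfs_instance, value illustrative
    while not port_is_free('localhost', nfsd_port):
        nfsd_port += 1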
@@ -1009,7 +1044,11 @@ class BaseConfig(object):
1009 # Use '%s' since they are integers 1044 # Use '%s' since they are integers
1010 os.putenv(k, '%s' % v) 1045 os.putenv(k, '%s' % v)
1011 1046
1012 self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s" % (nfsd_port, mountd_port) 1047 qb_nfsrootfs_extra_opt = self.get("QB_NFSROOTFS_EXTRA_OPT")
1048 if qb_nfsrootfs_extra_opt and not qb_nfsrootfs_extra_opt.startswith(","):
1049 qb_nfsrootfs_extra_opt = "," + qb_nfsrootfs_extra_opt
1050
1051 self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s%s" % (nfsd_port, mountd_port, qb_nfsrootfs_extra_opt)
1013 1052
1014 # Extract .tar.bz2 or .tar.bz if no nfs dir 1053 # Extract .tar.bz2 or .tar.bz if no nfs dir
1015 if not (self.rootfs and os.path.isdir(self.rootfs)): 1054 if not (self.rootfs and os.path.isdir(self.rootfs)):
@@ -1032,7 +1071,7 @@ class BaseConfig(object):
1032 cmd = ('runqemu-extract-sdk', src, dest) 1071 cmd = ('runqemu-extract-sdk', src, dest)
1033 logger.info('Running %s...' % str(cmd)) 1072 logger.info('Running %s...' % str(cmd))
1034 if subprocess.call(cmd) != 0: 1073 if subprocess.call(cmd) != 0:
1035 raise RunQemuError('Failed to run %s' % cmd) 1074 raise RunQemuError('Failed to run %s' % str(cmd))
1036 self.rootfs = dest 1075 self.rootfs = dest
1037 self.cleanup_files.append(self.rootfs) 1076 self.cleanup_files.append(self.rootfs)
1038 self.cleanup_files.append('%s.pseudo_state' % self.rootfs) 1077 self.cleanup_files.append('%s.pseudo_state' % self.rootfs)
@@ -1041,14 +1080,32 @@ class BaseConfig(object):
1041 cmd = ('runqemu-export-rootfs', 'start', self.rootfs) 1080 cmd = ('runqemu-export-rootfs', 'start', self.rootfs)
1042 logger.info('Running %s...' % str(cmd)) 1081 logger.info('Running %s...' % str(cmd))
1043 if subprocess.call(cmd) != 0: 1082 if subprocess.call(cmd) != 0:
1044 raise RunQemuError('Failed to run %s' % cmd) 1083 raise RunQemuError('Failed to run %s' % str(cmd))
1045 1084
1046 self.nfs_running = True 1085 self.nfs_running = True
1047 1086
1087 def setup_cmd(self):
1088 cmd = self.get('QB_SETUP_CMD')
1089 if cmd != '':
1090 logger.info('Running setup command %s' % str(cmd))
1091 if subprocess.call(cmd, shell=True) != 0:
1092 raise RunQemuError('Failed to run %s' % str(cmd))
1093
1048 def setup_net_bridge(self): 1094 def setup_net_bridge(self):
1049 self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % ( 1095 self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % (
1050 self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper'))) 1096 self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper')))
1051 1097
1098 def make_lock_dir(self, lockdir):
1099 if not os.path.exists(lockdir):
1100 # There might be a race issue when multiple runqemu processes are
1101 # running at the same time.
1102 try:
1103 os.mkdir(lockdir)
1104 os.chmod(lockdir, 0o777)
1105 except FileExistsError:
1106 pass
1107 return
1108
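A compact alternative for make_lock_dir(), assuming the world-writable chmod is still wanted (os.makedirs honours the umask, and unlike the code above this variant also chmods a pre-existing directory):

    import os

    def make_lock_dir(lockdir):
        os.makedirs(lockdir, exist_ok=True)   # tolerates concurrent creation
        os.chmod(lockdir, 0o777)              # open perms so any user can lock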
1052 def setup_slirp(self): 1109 def setup_slirp(self):
1053 """Setup user networking""" 1110 """Setup user networking"""
1054 1111
@@ -1058,7 +1115,7 @@ class BaseConfig(object):
1058 logger.info("Network configuration:%s", netconf) 1115 logger.info("Network configuration:%s", netconf)
1059 self.kernel_cmdline_script += netconf 1116 self.kernel_cmdline_script += netconf
1060 # Port mapping 1117 # Port mapping
1061 hostfwd = ",hostfwd=tcp::2222-:22,hostfwd=tcp::2323-:23" 1118 hostfwd = ",hostfwd=tcp:127.0.0.1:2222-:22,hostfwd=tcp:127.0.0.1:2323-:23"
1062 qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE')) 1119 qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE'))
1063 qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default 1120 qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default
1064 # Figure out the port 1121 # Figure out the port
@@ -1067,14 +1124,7 @@ class BaseConfig(object):
1067 mac = 2 1124 mac = 2
1068 1125
1069 lockdir = "/tmp/qemu-port-locks" 1126 lockdir = "/tmp/qemu-port-locks"
1070 if not os.path.exists(lockdir): 1127 self.make_lock_dir(lockdir)
1071 # There might be a race issue when multi runqemu processess are
1072 # running at the same time.
1073 try:
1074 os.mkdir(lockdir)
1075 os.chmod(lockdir, 0o777)
1076 except FileExistsError:
1077 pass
1078 1128
1079 # Find a free port to avoid conflicts 1129 # Find a free port to avoid conflicts
1080 for p in ports[:]: 1130 for p in ports[:]:
@@ -1114,20 +1164,17 @@ class BaseConfig(object):
1114 logger.error("ip: %s" % ip) 1164 logger.error("ip: %s" % ip)
1115 raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found") 1165 raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found")
1116 1166
1117 if not os.path.exists(lockdir): 1167 self.make_lock_dir(lockdir)
1118 # There might be a race issue when multi runqemu processess are
1119 # running at the same time.
1120 try:
1121 os.mkdir(lockdir)
1122 os.chmod(lockdir, 0o777)
1123 except FileExistsError:
1124 pass
1125 1168
1126 cmd = (ip, 'link') 1169 cmd = (ip, 'link')
1127 logger.debug('Running %s...' % str(cmd)) 1170 logger.debug('Running %s...' % str(cmd))
1128 ip_link = subprocess.check_output(cmd).decode('utf-8') 1171 ip_link = subprocess.check_output(cmd).decode('utf-8')
1129 # Matches line like: 6: tap0: <foo> 1172 # Matches line like: 6: tap0: <foo>
1130 possibles = re.findall('^[0-9]+: +(tap[0-9]+): <.*', ip_link, re.M) 1173 oe_tap_name = 'tap'
1174 if 'OE_TAP_NAME' in os.environ:
1175 oe_tap_name = os.environ['OE_TAP_NAME']
1176 tap_re = '^[0-9]+: +(' + oe_tap_name + '[0-9]+): <.*'
1177 possibles = re.findall(tap_re, ip_link, re.M)
1131 tap = "" 1178 tap = ""
1132 for p in possibles: 1179 for p in possibles:
1133 lockfile = os.path.join(lockdir, p) 1180 lockfile = os.path.join(lockdir, p)
@@ -1148,25 +1195,28 @@ class BaseConfig(object):
1148 raise RunQemuError("a new one with sudo.") 1195 raise RunQemuError("a new one with sudo.")
1149 1196
1150 gid = os.getgid() 1197 gid = os.getgid()
1151 uid = os.getuid()
1152 logger.info("Setting up tap interface under sudo") 1198 logger.info("Setting up tap interface under sudo")
1153 cmd = ('sudo', self.qemuifup, str(uid), str(gid), self.bindir_native) 1199 cmd = ('sudo', self.qemuifup, str(gid))
1154 try: 1200 for _ in range(5):
1155 tap = subprocess.check_output(cmd).decode('utf-8').strip() 1201 try:
1156 except subprocess.CalledProcessError as e: 1202 tap = subprocess.check_output(cmd).decode('utf-8').strip()
1157 logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e)) 1203 except subprocess.CalledProcessError as e:
1158 sys.exit(1) 1204 logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e))
1159 lockfile = os.path.join(lockdir, tap) 1205 sys.exit(1)
1160 self.taplock = lockfile + '.lock' 1206 lockfile = os.path.join(lockdir, tap)
1161 self.acquire_taplock() 1207 self.taplock = lockfile + '.lock'
1162 self.cleantap = True 1208 if self.acquire_taplock():
1163 logger.debug('Created tap: %s' % tap) 1209 self.cleantap = True
1210 logger.debug('Created tap: %s' % tap)
1211 break
1212 else:
1213 tap = None
1164 1214
1165 if not tap: 1215 if not tap:
1166 logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") 1216 logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.")
1167 sys.exit(1) 1217 sys.exit(1)
1168 self.tap = tap 1218 self.tap = tap
1169 tapnum = int(tap[3:]) 1219 tapnum = int(tap[len(oe_tap_name):])
1170 gateway = tapnum * 2 + 1 1220 gateway = tapnum * 2 + 1
1171 client = gateway + 1 1221 client = gateway + 1
1172 if self.fstype == 'nfs': 1222 if self.fstype == 'nfs':
@@ -1174,6 +1224,7 @@ class BaseConfig(object):
1174 netconf = " " + self.cmdline_ip_tap 1224 netconf = " " + self.cmdline_ip_tap
1175 netconf = netconf.replace('@CLIENT@', str(client)) 1225 netconf = netconf.replace('@CLIENT@', str(client))
1176 netconf = netconf.replace('@GATEWAY@', str(gateway)) 1226 netconf = netconf.replace('@GATEWAY@', str(gateway))
1227 self.nfs_server = self.nfs_server.replace('@GATEWAY@', str(gateway))
1177 logger.info("Network configuration:%s", netconf) 1228 logger.info("Network configuration:%s", netconf)
1178 self.kernel_cmdline_script += netconf 1229 self.kernel_cmdline_script += netconf
1179 mac = "%s%02x" % (self.mac_tap, client) 1230 mac = "%s%02x" % (self.mac_tap, client)
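Each tap device maps deterministically onto a pair of 192.168.7.x addresses, and the new @GATEWAY@ substitution makes the NFS server address follow the tap number. Worked through with a hypothetical device:

    oe_tap_name = 'tap'                     # or $OE_TAP_name from the environment
    tap = 'tap2'                            # hypothetical device acquired above
    tapnum = int(tap[len(oe_tap_name):])    # -> 2
    gateway = tapnum * 2 + 1                # -> 5, host side: 192.168.7.5
    client = gateway + 1                    # -> 6, guest side: 192.168.7.6
    nfs_server = '192.168.7.@GATEWAY@'.replace('@GATEWAY@', str(gateway))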
@@ -1189,7 +1240,8 @@ class BaseConfig(object):
1189 self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qemu_tap_opt)) 1240 self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qemu_tap_opt))
1190 1241
1191 def setup_network(self): 1242 def setup_network(self):
1192 if self.get('QB_NET') == 'none': 1243 if self.nonetwork or self.get('QB_NET') == 'none':
1244 self.set('NETWORK_CMD', '-nic none')
1193 return 1245 return
1194 if sys.stdin.isatty(): 1246 if sys.stdin.isatty():
1195 self.saved_stty = subprocess.check_output(("stty", "-g")).decode('utf-8').strip() 1247 self.saved_stty = subprocess.check_output(("stty", "-g")).decode('utf-8').strip()
@@ -1249,6 +1301,10 @@ class BaseConfig(object):
1249 elif drive_type.startswith("/dev/hd"): 1301 elif drive_type.startswith("/dev/hd"):
1250 logger.info('Using ide drive') 1302 logger.info('Using ide drive')
1251 vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format) 1303 vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format)
1304 elif drive_type.startswith("/dev/mmcblk"):
1305 logger.info('Using sdcard drive')
1306 vm_drive = '-drive id=sdcard0,if=none,file=%s,format=%s -device sdhci-pci -device sd-card,drive=sdcard0' \
1307 % (self.rootfs, rootfs_format)
1252 elif drive_type.startswith("/dev/vdb"): 1308 elif drive_type.startswith("/dev/vdb"):
1253 logger.info('Using block virtio drive'); 1309 logger.info('Using block virtio drive');
1254 vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \ 1310 vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \
@@ -1288,7 +1344,7 @@ class BaseConfig(object):
1288 """attempt to determine the appropriate qemu-system binary""" 1344 """attempt to determine the appropriate qemu-system binary"""
1289 mach = self.get('MACHINE') 1345 mach = self.get('MACHINE')
1290 if not mach: 1346 if not mach:
1291 search = '.*(qemux86-64|qemux86|qemuarm64|qemuarm|qemumips64|qemumips64el|qemumipsel|qemumips|qemuppc).*' 1347 search = '.*(qemux86-64|qemux86|qemuarm64|qemuarm|qemuloongarch64|qemumips64|qemumips64el|qemumipsel|qemumips|qemuppc).*'
1292 if self.rootfs: 1348 if self.rootfs:
1293 match = re.match(search, self.rootfs) 1349 match = re.match(search, self.rootfs)
1294 if match: 1350 if match:
@@ -1311,6 +1367,8 @@ class BaseConfig(object):
1311 qbsys = 'x86_64' 1367 qbsys = 'x86_64'
1312 elif mach == 'qemuppc': 1368 elif mach == 'qemuppc':
1313 qbsys = 'ppc' 1369 qbsys = 'ppc'
1370 elif mach == 'qemuloongarch64':
1371 qbsys = 'loongarch64'
1314 elif mach == 'qemumips': 1372 elif mach == 'qemumips':
1315 qbsys = 'mips' 1373 qbsys = 'mips'
1316 elif mach == 'qemumips64': 1374 elif mach == 'qemumips64':
@@ -1339,6 +1397,35 @@ class BaseConfig(object):
1339 raise RunQemuError("Failed to boot, QB_SYSTEM_NAME is NULL!") 1397 raise RunQemuError("Failed to boot, QB_SYSTEM_NAME is NULL!")
1340 self.qemu_system = qemu_system 1398 self.qemu_system = qemu_system
1341 1399
1400 def check_render_nodes(self):
1401 render_hint = """If /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create one suitable for mesa llvmpipe software renderer."""
1402 try:
1403 content = os.listdir("/dev/dri")
1404 nodes = [i for i in content if i.startswith('renderD')]
1405 if len(nodes) == 0:
1406 raise RunQemuError("No render nodes found in /dev/dri/: %s. %s" %(content, render_hint))
1407 for n in nodes:
1408 try:
1409 with open(os.path.join("/dev/dri", n), "w") as f:
1410 f.close()
1411 break
1412 except IOError:
1413 pass
1414 else:
1415 raise RunQemuError("None of the render nodes in /dev/dri/ are accessible: %s; you may need to add yourself to 'render' group or otherwise ensure you have read-write permissions on one of them." %(nodes))
1416 except FileNotFoundError:
1417 raise RunQemuError("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
1418
1419 def setup_guest_agent(self):
1420 if self.guest_agent == True:
1421 self.qemu_opt += ' -chardev socket,path=' + self.guest_agent_sockpath + ',server,nowait,id=qga0 '
1422 self.qemu_opt += ' -device virtio-serial '
1423 self.qemu_opt += ' -device virtserialport,chardev=qga0,name=org.qemu.guest_agent.0 '
1424
1425 def setup_qmp(self):
1426 if self.qmp:
1427 self.qemu_opt += " -qmp %s,server,nowait" % self.qmp
1428
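setup_qmp() only appends the -qmp switch; a client still has to negotiate with the monitor before issuing commands. A rough sketch, assuming self.qmp named a unix socket such as unix:/tmp/qmp.sock (path illustrative):

    import json
    import socket

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect('/tmp/qmp.sock')            # path is illustrative
    f = sock.makefile('rw')
    json.loads(f.readline())                 # consume the QMP greeting banner
    f.write(json.dumps({"execute": "qmp_capabilities"}) + '\n')
    f.flush()
    json.loads(f.readline())                 # {"return": {}}
    f.write(json.dumps({"execute": "query-status"}) + '\n')
    f.flush()
    print(json.loads(f.readline()))          # e.g. the VM's running state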
1342 def setup_vga(self): 1429 def setup_vga(self):
1343 if self.nographic == True: 1430 if self.nographic == True:
1344 if self.sdl == True: 1431 if self.sdl == True:
@@ -1354,27 +1441,43 @@ class BaseConfig(object):
1354 if (self.gl_es == True or self.gl == True) and (self.sdl == False and self.gtk == False): 1441 if (self.gl_es == True or self.gl == True) and (self.sdl == False and self.gtk == False):
1355 raise RunQemuError('Option gl/gl-es needs gtk or sdl option.') 1442 raise RunQemuError('Option gl/gl-es needs gtk or sdl option.')
1356 1443
1357 if self.sdl == True or self.gtk == True or self.egl_headless == True: 1444 # If we have no display option, we autodetect based upon what qemu supports. We
1358 if self.gl or self.gl_es or self.egl_headless: 1445 # need our font setup and show-cursor below so we need to see what qemu --help says
1359 self.qemu_opt += ' -device virtio-vga-gl ' 1446 # is supported so we can pass our correct config in.
1447 if not self.nographic and not self.sdl and not self.gtk and not self.publicvnc and not self.egl_headless == True:
1448 output = subprocess.check_output([self.qemu_bin, "--help"], universal_newlines=True, env=self.qemu_environ)
1449 if "-display gtk" in output:
1450 self.gtk = True
1451 elif "-display sdl" in output:
1452 self.sdl = True
1360 else: 1453 else:
1361 self.qemu_opt += ' -device virtio-vga ' 1454 self.qemu_opt += ' -display none'
1455
1456 if self.sdl == True or self.gtk == True or self.egl_headless == True:
1457
1458 if self.qemu_system.endswith(('i386', 'x86_64')):
1459 if self.gl or self.gl_es or self.egl_headless:
1460 self.qemu_opt += ' -device virtio-vga-gl '
1461 else:
1462 self.qemu_opt += ' -device virtio-vga '
1362 1463
1363 self.qemu_opt += '-display ' 1464 self.qemu_opt += ' -display '
1364 if self.egl_headless == True: 1465 if self.egl_headless == True:
1365 self.set_dri_path() 1466 self.check_render_nodes()
1467 self.set_mesa_paths()
1366 self.qemu_opt += 'egl-headless,' 1468 self.qemu_opt += 'egl-headless,'
1367 else: 1469 else:
1368 if self.sdl == True: 1470 if self.sdl == True:
1369 self.qemu_opt += 'sdl,' 1471 self.qemu_opt += 'sdl,'
1370 elif self.gtk == True: 1472 elif self.gtk == True:
1473 self.qemu_environ['FONTCONFIG_PATH'] = '/etc/fonts'
1371 self.qemu_opt += 'gtk,' 1474 self.qemu_opt += 'gtk,'
1372 1475
1373 if self.gl == True: 1476 if self.gl == True:
1374 self.set_dri_path() 1477 self.set_mesa_paths()
1375 self.qemu_opt += 'gl=on,' 1478 self.qemu_opt += 'gl=on,'
1376 elif self.gl_es == True: 1479 elif self.gl_es == True:
1377 self.set_dri_path() 1480 self.set_mesa_paths()
1378 self.qemu_opt += 'gl=es,' 1481 self.qemu_opt += 'gl=es,'
1379 self.qemu_opt += 'show-cursor=on' 1482 self.qemu_opt += 'show-cursor=on'
1380 1483
@@ -1386,6 +1489,19 @@ class BaseConfig(object):
1386 for entry in self.get('SERIAL_CONSOLES').split(' '): 1489 for entry in self.get('SERIAL_CONSOLES').split(' '):
1387 self.kernel_cmdline_script += ' console=%s' %entry.split(';')[1] 1490 self.kernel_cmdline_script += ' console=%s' %entry.split(';')[1]
1388 1491
1492 # We always want ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES).
1493 # If no serial or serialtcp options were specified, only ttyS0 is created
1494 # and sysvinit shows an error trying to enable ttyS1:
1495 # INIT: Id "S1" respawning too fast: disabled for 5 minutes
1496 serial_num = len(re.findall("(^| )-serial ", self.qemu_opt))
1497
1498 # Assume if the user passed serial options, they know what they want
1499 # and pad to two devices
1500 if serial_num == 1:
1501 self.qemu_opt += " -serial null"
1502 elif serial_num >= 2:
1503 return
1504
1389 if self.serialstdio == True or self.nographic == True: 1505 if self.serialstdio == True or self.nographic == True:
1390 self.qemu_opt += " -serial mon:stdio" 1506 self.qemu_opt += " -serial mon:stdio"
1391 else: 1507 else:
@@ -1397,15 +1513,11 @@ class BaseConfig(object):
1397 1513
1398 self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") 1514 self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT")
1399 1515
1400 # We always wants ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES). 1516 serial_num = len(re.findall("(^| )-serial ", self.qemu_opt))
1401 # If no serial or serialtcp options were specified, only ttyS0 is created
1402 # and sysvinit shows an error trying to enable ttyS1:
1403 # INIT: Id "S1" respawning too fast: disabled for 5 minutes
1404 serial_num = len(re.findall("-serial", self.qemu_opt))
1405 if serial_num < 2: 1517 if serial_num < 2:
1406 self.qemu_opt += " -serial null" 1518 self.qemu_opt += " -serial null"
1407 1519
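The anchored pattern '(^| )-serial ' counts only standalone -serial switches, where the old bare '-serial' substring also matched inside longer option names. For example:

    import re

    opts = "-serial mon:stdio -serialsomething -device pci-serial"
    print(len(re.findall("-serial", opts)))          # 3: substring matches
    print(len(re.findall("(^| )-serial ", opts)))    # 1: only the real switch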
1408 def setup_final(self): 1520 def find_qemu(self):
1409 qemu_bin = os.path.join(self.bindir_native, self.qemu_system) 1521 qemu_bin = os.path.join(self.bindir_native, self.qemu_system)
1410 1522
1411 # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't 1523 # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't
@@ -1424,8 +1536,13 @@ class BaseConfig(object):
1424 1536
1425 if not os.access(qemu_bin, os.X_OK): 1537 if not os.access(qemu_bin, os.X_OK):
1426 raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin) 1538 raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin)
1539 self.qemu_bin = qemu_bin
1540
1541 def setup_final(self):
1427 1542
1428 self.qemu_opt = "%s %s %s %s %s" % (qemu_bin, self.get('NETWORK_CMD'), self.get('QB_RNG'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND').replace('@DEPLOY_DIR_IMAGE@', self.get('DEPLOY_DIR_IMAGE'))) 1543 self.find_qemu()
1544
1545 self.qemu_opt = "%s %s %s %s %s" % (self.qemu_bin, self.get('NETWORK_CMD'), self.get('QB_RNG'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND').replace('@DEPLOY_DIR_IMAGE@', self.get('DEPLOY_DIR_IMAGE')))
1429 1546
1430 for ovmf in self.ovmf_bios: 1547 for ovmf in self.ovmf_bios:
1431 format = ovmf.rsplit('.', 1)[-1] 1548 format = ovmf.rsplit('.', 1)[-1]
@@ -1449,13 +1566,20 @@ class BaseConfig(object):
1449 if self.snapshot: 1566 if self.snapshot:
1450 self.qemu_opt += " -snapshot" 1567 self.qemu_opt += " -snapshot"
1451 1568
1569 self.setup_guest_agent()
1570 self.setup_qmp()
1452 self.setup_serial() 1571 self.setup_serial()
1453 self.setup_vga() 1572 self.setup_vga()
1454 1573
1455 def start_qemu(self): 1574 def start_qemu(self):
1456 import shlex 1575 import shlex
1457 if self.kernel: 1576 if self.kernel:
1458 kernel_opts = "-kernel %s -append '%s %s %s %s'" % (self.kernel, self.kernel_cmdline, 1577 kernel_opts = "-kernel %s" % (self.kernel)
1578 if self.get('QB_KERNEL_CMDLINE') == "none":
1579 if self.bootparams:
1580 kernel_opts += " -append '%s'" % (self.bootparams)
1581 else:
1582 kernel_opts += " -append '%s %s %s %s'" % (self.kernel_cmdline,
1459 self.kernel_cmdline_script, self.get('QB_KERNEL_CMDLINE_APPEND'), 1583 self.kernel_cmdline_script, self.get('QB_KERNEL_CMDLINE_APPEND'),
1460 self.bootparams) 1584 self.bootparams)
1461 if self.dtb: 1585 if self.dtb:
@@ -1469,14 +1593,17 @@ class BaseConfig(object):
1469 cmd = "%s %s" % (self.qemu_opt, kernel_opts) 1593 cmd = "%s %s" % (self.qemu_opt, kernel_opts)
1470 cmds = shlex.split(cmd) 1594 cmds = shlex.split(cmd)
1471 logger.info('Running %s\n' % cmd) 1595 logger.info('Running %s\n' % cmd)
1596 with open('/proc/uptime', 'r') as f:
1597 uptime_seconds = f.readline().split()[0]
1598 logger.info('Host uptime: %s\n' % uptime_seconds)
1472 pass_fds = [] 1599 pass_fds = []
1473 if self.taplock_descriptor: 1600 if self.taplock_descriptor:
1474 pass_fds = [self.taplock_descriptor.fileno()] 1601 pass_fds = [self.taplock_descriptor.fileno()]
1475 if len(self.portlocks): 1602 if len(self.portlocks):
1476 for descriptor in self.portlocks.values(): 1603 for descriptor in self.portlocks.values():
1477 pass_fds.append(descriptor.fileno()) 1604 pass_fds.append(descriptor.fileno())
1478 process = subprocess.Popen(cmds, stderr=subprocess.PIPE, pass_fds=pass_fds) 1605 process = subprocess.Popen(cmds, stderr=subprocess.PIPE, pass_fds=pass_fds, env=self.qemu_environ)
1479 self.qemupid = process.pid 1606 self.qemuprocess = process
1480 retcode = process.wait() 1607 retcode = process.wait()
1481 if retcode: 1608 if retcode:
1482 if retcode == -signal.SIGTERM: 1609 if retcode == -signal.SIGTERM:
@@ -1484,6 +1611,13 @@ class BaseConfig(object):
1484 else: 1611 else:
1485 logger.error("Failed to run qemu: %s", process.stderr.read().decode()) 1612 logger.error("Failed to run qemu: %s", process.stderr.read().decode())
1486 1613
1614 def cleanup_cmd(self):
1615 cmd = self.get('QB_CLEANUP_CMD')
1616 if cmd != '':
1617 logger.info('Running cleanup command %s' % str(cmd))
1618 if subprocess.call(cmd, shell=True) != 0:
1619 raise RunQemuError('Failed to run %s' % str(cmd))
1620
1487 def cleanup(self): 1621 def cleanup(self):
1488 if self.cleaned: 1622 if self.cleaned:
1489 return 1623 return
@@ -1492,18 +1626,30 @@ class BaseConfig(object):
1492 signal.signal(signal.SIGTERM, signal.SIG_IGN) 1626 signal.signal(signal.SIGTERM, signal.SIG_IGN)
1493 1627
1494 logger.info("Cleaning up") 1628 logger.info("Cleaning up")
1629
1630 if self.qemuprocess:
1631 try:
1632 # give it some time to shut down, ignore return values and output
1633 self.qemuprocess.send_signal(signal.SIGTERM)
1634 self.qemuprocess.communicate(timeout=5)
1635 except subprocess.TimeoutExpired:
1636 self.qemuprocess.kill()
1637
1638 with open('/proc/uptime', 'r') as f:
1639 uptime_seconds = f.readline().split()[0]
1640 logger.info('Host uptime: %s\n' % uptime_seconds)
1495 if self.cleantap: 1641 if self.cleantap:
1496 cmd = ('sudo', self.qemuifdown, self.tap, self.bindir_native) 1642 cmd = ('sudo', self.qemuifdown, self.tap)
1497 logger.debug('Running %s' % str(cmd)) 1643 logger.debug('Running %s' % str(cmd))
1498 subprocess.check_call(cmd) 1644 subprocess.check_call(cmd)
1499 self.release_taplock() 1645 self.release_taplock()
1500 self.release_portlock()
1501 1646
1502 if self.nfs_running: 1647 if self.nfs_running:
1503 logger.info("Shutting down the userspace NFS server...") 1648 logger.info("Shutting down the userspace NFS server...")
1504 cmd = ("runqemu-export-rootfs", "stop", self.rootfs) 1649 cmd = ("runqemu-export-rootfs", "stop", self.rootfs)
1505 logger.debug('Running %s' % str(cmd)) 1650 logger.debug('Running %s' % str(cmd))
1506 subprocess.check_call(cmd) 1651 subprocess.check_call(cmd)
1652 self.release_portlock()
1507 1653
1508 if self.saved_stty: 1654 if self.saved_stty:
1509 subprocess.check_call(("stty", self.saved_stty)) 1655 subprocess.check_call(("stty", self.saved_stty))
@@ -1516,9 +1662,12 @@ class BaseConfig(object):
1516 else: 1662 else:
1517 shutil.rmtree(ent) 1663 shutil.rmtree(ent)
1518 1664
1665 # Deliberately ignore the return code of 'tput smam'.
1666 subprocess.call(["tput", "smam"])
1667
1519 self.cleaned = True 1668 self.cleaned = True
1520 1669
1521 def run_bitbake_env(self, mach=None): 1670 def run_bitbake_env(self, mach=None, target=''):
1522 bitbake = shutil.which('bitbake') 1671 bitbake = shutil.which('bitbake')
1523 if not bitbake: 1672 if not bitbake:
1524 return 1673 return
@@ -1530,23 +1679,37 @@ class BaseConfig(object):
1530 if multiconfig: 1679 if multiconfig:
1531 multiconfig = "mc:%s" % multiconfig 1680 multiconfig = "mc:%s" % multiconfig
1532 1681
1682 if self.rootfs and not target:
1683 target = self.rootfs
1684
1533 if mach: 1685 if mach:
1534 cmd = 'MACHINE=%s bitbake -e %s' % (mach, multiconfig) 1686 cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
1535 else: 1687 else:
1536 cmd = 'bitbake -e %s' % multiconfig 1688 cmd = 'bitbake -e %s %s' % (multiconfig, target)
1537 1689
1538 logger.info('Running %s...' % cmd) 1690 logger.info('Running %s...' % cmd)
1539 return subprocess.check_output(cmd, shell=True).decode('utf-8') 1691 try:
1692 return subprocess.check_output(cmd, shell=True).decode('utf-8')
1693 except subprocess.CalledProcessError as err:
1694 logger.warning("Couldn't run '%s' to gather environment information, maybe the target wasn't an image name, will retry with virtual/kernel as a target:\n%s" % (cmd, err.output.decode('utf-8')))
1695 # need something with IMAGE_NAME_SUFFIX/IMAGE_LINK_NAME defined (kernel also inherits image-artifact-names.bbclass)
1696 target = 'virtual/kernel'
1697 if mach:
1698 cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
1699 else:
1700 cmd = 'bitbake -e %s %s' % (multiconfig, target)
1701 try:
1702 return subprocess.check_output(cmd, shell=True).decode('utf-8')
1703 except subprocess.CalledProcessError as err:
1704 logger.warning("Couldn't run '%s' to gather environment information, giving up with 'bitbake -e':\n%s" % (cmd, err.output.decode('utf-8')))
1705 return ''
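Whichever target ends up supplying the environment, the 'bitbake -e' dump is later searched for variable assignments. A minimal sketch of that extraction (pattern assumed, matching lines like STAGING_BINDIR_NATIVE="..."):

    import re

    def get_var(bitbake_e, var):
        """Extract var's value from 'bitbake -e' output."""
        m = re.search('^%s="(.*)"' % var, bitbake_e, re.M)
        return m.group(1) if m else ''

    # e.g. get_var(self.bitbake_e, 'STAGING_BINDIR_NATIVE')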
1540 1706
1541 def load_bitbake_env(self, mach=None): 1707
1708 def load_bitbake_env(self, mach=None, target=None):
1542 if self.bitbake_e: 1709 if self.bitbake_e:
1543 return 1710 return
1544 1711
1545 try: 1712 self.bitbake_e = self.run_bitbake_env(mach=mach, target=target)
1546 self.bitbake_e = self.run_bitbake_env(mach=mach)
1547 except subprocess.CalledProcessError as err:
1548 self.bitbake_e = ''
1549 logger.warning("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8'))
1550 1713
1551 def validate_combos(self): 1714 def validate_combos(self):
1552 if (self.fstype in self.vmtypes) and self.kernel: 1715 if (self.fstype in self.vmtypes) and self.kernel:
@@ -1576,7 +1739,7 @@ class BaseConfig(object):
1576 return result 1739 return result
1577 raise RunQemuError("Native sysroot directory %s doesn't exist" % result) 1740 raise RunQemuError("Native sysroot directory %s doesn't exist" % result)
1578 else: 1741 else:
1579 raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % cmd) 1742 raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % str(cmd))
1580 1743
1581 1744
1582def main(): 1745def main():
@@ -1592,11 +1755,8 @@ def main():
1592 subprocess.check_call([renice, str(os.getpid())]) 1755 subprocess.check_call([renice, str(os.getpid())])
1593 1756
1594 def sigterm_handler(signum, frame): 1757 def sigterm_handler(signum, frame):
1595 logger.info("SIGTERM received") 1758 logger.info("Received signal: %s" % (signum))
1596 os.kill(config.qemupid, signal.SIGTERM)
1597 config.cleanup() 1759 config.cleanup()
1598 # Deliberately ignore the return code of 'tput smam'.
1599 subprocess.call(["tput", "smam"])
1600 signal.signal(signal.SIGTERM, sigterm_handler) 1760 signal.signal(signal.SIGTERM, sigterm_handler)
1601 1761
1602 config.check_args() 1762 config.check_args()
@@ -1608,6 +1768,7 @@ def main():
1608 config.setup_network() 1768 config.setup_network()
1609 config.setup_rootfs() 1769 config.setup_rootfs()
1610 config.setup_final() 1770 config.setup_final()
1771 config.setup_cmd()
1611 config.start_qemu() 1772 config.start_qemu()
1612 except RunQemuError as err: 1773 except RunQemuError as err:
1613 logger.error(err) 1774 logger.error(err)
@@ -1617,9 +1778,8 @@ def main():
1617 traceback.print_exc() 1778 traceback.print_exc()
1618 return 1 1779 return 1
1619 finally: 1780 finally:
1781 config.cleanup_cmd()
1620 config.cleanup() 1782 config.cleanup()
1621 # Deliberately ignore the return code of 'tput smam'.
1622 subprocess.call(["tput", "smam"])
1623 1783
1624if __name__ == "__main__": 1784if __name__ == "__main__":
1625 sys.exit(main()) 1785 sys.exit(main())
diff --git a/scripts/runqemu-addptable2image b/scripts/runqemu-addptable2image
index ca29427258..87a8da3a63 100755
--- a/scripts/runqemu-addptable2image
+++ b/scripts/runqemu-addptable2image
@@ -1,6 +1,6 @@
1#!/bin/sh 1#!/bin/sh
2 2
3# Add a partion table to an ext2 image file 3# Add a partition table to an ext2 image file
4# 4#
5# Copyright (C) 2006-2007 OpenedHand Ltd. 5# Copyright (C) 2006-2007 OpenedHand Ltd.
6# 6#
diff --git a/scripts/runqemu-export-rootfs b/scripts/runqemu-export-rootfs
index 384c091713..6a8acd0d5a 100755
--- a/scripts/runqemu-export-rootfs
+++ b/scripts/runqemu-export-rootfs
@@ -34,16 +34,12 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then
34 echo "Did you forget to source your build environment setup script?" 34 echo "Did you forget to source your build environment setup script?"
35 exit 1 35 exit 1
36fi 36fi
37. $SYSROOT_SETUP_SCRIPT meta-ide-support 37. $SYSROOT_SETUP_SCRIPT qemu-helper-native
38 38
39if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then 39if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then
40 echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/" 40 echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/"
41 41
42 if [ "x$OECORE_DISTRO_VERSION" = "x" ]; then 42 echo "This shouldn't happen - something is missing from your toolchain installation"
43 echo "Have you run 'bitbake meta-ide-support'?"
44 else
45 echo "This shouldn't happen - something is missing from your toolchain installation"
46 fi
47 exit 1 43 exit 1
48fi 44fi
49 45
@@ -74,26 +70,11 @@ MOUNTD_PORT=${MOUNTD_PORT:=$[ 3048 + 2 * $NFS_INSTANCE ]}
74 70
75## For debugging you would additionally add 71## For debugging you would additionally add
76## --debug all 72## --debug all
77UNFSD_OPTS="-p -N -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT" 73UNFSD_OPTS="-p -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT"
78 74
79# See how we were called. 75# See how we were called.
80case "$1" in 76case "$1" in
81 start) 77 start)
82 PORTMAP_RUNNING=`ps -ef | grep portmap | grep -v grep`
83 RPCBIND_RUNNING=`ps -ef | grep rpcbind | grep -v grep`
84 if [[ "x$PORTMAP_RUNNING" = "x" && "x$RPCBIND_RUNNING" = "x" ]]; then
85 echo "======================================================="
86 echo "Error: neither rpcbind nor portmap appear to be running"
87 echo "Please install and start one of these services first"
88 echo "======================================================="
89 echo "Tip: for recent Ubuntu hosts, run:"
90 echo " sudo apt-get install rpcbind"
91 echo "Then add OPTIONS=\"-i -w\" to /etc/default/rpcbind and run"
92 echo " sudo service portmap restart"
93
94 exit 1
95 fi
96
97 echo "Creating exports file..." 78 echo "Creating exports file..."
98 echo "$NFS_EXPORT_DIR (rw,no_root_squash,no_all_squash,insecure)" > $EXPORTS 79 echo "$NFS_EXPORT_DIR (rw,no_root_squash,no_all_squash,insecure)" > $EXPORTS
99 80
diff --git a/scripts/runqemu-extract-sdk b/scripts/runqemu-extract-sdk
index 9bc0c07fb8..db05da25f2 100755
--- a/scripts/runqemu-extract-sdk
+++ b/scripts/runqemu-extract-sdk
@@ -25,7 +25,7 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then
25 echo "Did you forget to source your build system environment setup script?" 25 echo "Did you forget to source your build system environment setup script?"
26 exit 1 26 exit 1
27fi 27fi
28. $SYSROOT_SETUP_SCRIPT meta-ide-support 28. $SYSROOT_SETUP_SCRIPT qemu-helper-native
29PSEUDO_OPTS="-P $OECORE_NATIVE_SYSROOT/usr" 29PSEUDO_OPTS="-P $OECORE_NATIVE_SYSROOT/usr"
30 30
31ROOTFS_TARBALL=$1 31ROOTFS_TARBALL=$1
diff --git a/scripts/runqemu-gen-tapdevs b/scripts/runqemu-gen-tapdevs
index a6ee4517da..a00c79c442 100755
--- a/scripts/runqemu-gen-tapdevs
+++ b/scripts/runqemu-gen-tapdevs
@@ -1,53 +1,58 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Create a "bank" of tap network devices that can be used by the 3# Create a "bank" of tap network devices that can be used by the
4# runqemu script. This script needs to be run as root, and will 4# runqemu script. This script needs to be run as root
5# use the tunctl binary from the build system sysroot. Note: many Linux
6# distros these days still use an older version of tunctl which does not
7# support the group permissions option, hence the need to use the build
8# system provided version.
9# 5#
10# Copyright (C) 2010 Intel Corp. 6# Copyright (C) 2010 Intel Corp.
11# 7#
12# SPDX-License-Identifier: GPL-2.0-only 8# SPDX-License-Identifier: GPL-2.0-only
13# 9#
14 10
15uid=`id -u`
16gid=`id -g` 11gid=`id -g`
17if [ -n "$SUDO_UID" ]; then
18 uid=$SUDO_UID
19fi
20if [ -n "$SUDO_GID" ]; then 12if [ -n "$SUDO_GID" ]; then
21 gid=$SUDO_GID 13 gid=$SUDO_GID
22fi 14fi
23 15
24usage() { 16usage() {
25 echo "Usage: sudo $0 <uid> <gid> <num> <staging_bindir_native>" 17 echo "Usage: sudo $0 <gid> <num>"
26 echo "Where <uid> is the numeric user id the tap devices will be owned by"
27 echo "Where <gid> is the numeric group id the tap devices will be owned by" 18 echo "Where <gid> is the numeric group id the tap devices will be owned by"
28 echo "<num> is the number of tap devices to create (0 to remove all)" 19 echo "<num> is the number of tap devices to create (0 to remove all)"
29 echo "<native-sysroot-basedir> is the path to the build system's native sysroot"
30 echo "For example:" 20 echo "For example:"
31 echo "$ bitbake qemu-helper-native" 21 echo "$ bitbake qemu-helper-native"
32 echo "$ sudo $0 $uid $gid 4 tmp/sysroots-components/x86_64/qemu-helper-native/usr/bin" 22 echo "$ sudo $0 $gid 4"
33 echo "" 23 echo ""
34 exit 1 24 exit 1
35} 25}
36 26
37if [ $# -ne 4 ]; then 27# Allow passing 4 arguments for backward compatibility with warning
28if [ $# -gt 4 ]; then
29 echo "Error: Incorrect number of arguments"
30 usage
31fi
32if [ $# -gt 3 ]; then
33 echo "Warning: Ignoring the <native-sysroot-basedir> parameter. It is no longer needed."
34fi
35if [ $# -gt 2 ]; then
36 echo "Warning: Ignoring the <uid> parameter. It is no longer needed."
37 GID=$2
38 COUNT=$3
39elif [ $# -eq 2 ]; then
40 GID=$1
41 COUNT=$2
42else
38 echo "Error: Incorrect number of arguments" 43 echo "Error: Incorrect number of arguments"
39 usage 44 usage
40fi 45fi
41 46
42TUID=$1
43GID=$2
44COUNT=$3
45STAGING_BINDIR_NATIVE=$4
46 47
47TUNCTL=$STAGING_BINDIR_NATIVE/tunctl 48if [ -z "$OE_TAP_NAME" ]; then
48if [[ ! -x "$TUNCTL" || -d "$TUNCTL" ]]; then 49 OE_TAP_NAME=tap
49 echo "Error: $TUNCTL is not an executable" 50fi
50 usage 51
52# check if COUNT is a number and >= 0
53if ! [ $COUNT -ge 0 ]; then
54 echo "Error: Incorrect count: $COUNT"
55 exit 1
51fi 56fi
52 57
53if [ $EUID -ne 0 ]; then 58if [ $EUID -ne 0 ]; then
@@ -62,48 +67,41 @@ if [ ! -x "$RUNQEMU_IFUP" ]; then
62 exit 1 67 exit 1
63fi 68fi
64 69
65IFCONFIG=`which ip 2> /dev/null` 70if interfaces=`ip tuntap list` 2>/dev/null; then
66if [ -z "$IFCONFIG" ]; then 71 interfaces=`echo "$interfaces" |cut -f1 -d: |grep -E "^$OE_TAP_NAME.*"`
67 # Is it ever anywhere else?
68 IFCONFIG=/sbin/ip
69fi
70if [ ! -x "$IFCONFIG" ]; then
71 echo "$IFCONFIG cannot be executed"
72 exit 1
73fi
74
75if [ $COUNT -ge 0 ]; then
76 # Ensure we start with a clean slate
77 for tap in `$IFCONFIG link | grep tap | awk '{ print \$2 }' | sed s/://`; do
78 echo "Note: Destroying pre-existing tap interface $tap..."
79 $TUNCTL -d $tap
80 done
81 rm -f /etc/runqemu-nosudo
82else 72else
83 echo "Error: Incorrect count: $COUNT" 73 echo "Failed to call 'ip tuntap list'" >&2
84 exit 1 74 exit 1
85fi 75fi
86 76
87if [ $COUNT -gt 0 ]; then 77# Ensure we start with a clean slate
88 echo "Creating $COUNT tap devices for UID: $TUID GID: $GID..." 78for tap in $interfaces; do
89 for ((index=0; index < $COUNT; index++)); do 79 echo "Note: Destroying pre-existing tap interface $tap..."
90 echo "Creating tap$index" 80 ip tuntap del $tap mode tap
91 ifup=`$RUNQEMU_IFUP $TUID $GID $STAGING_BINDIR_NATIVE 2>&1` 81done
92 if [ $? -ne 0 ]; then 82rm -f /etc/runqemu-nosudo
93 echo "Error running tunctl: $ifup"
94 exit 1
95 fi
96 done
97 83
98 echo "Note: For systems running NetworkManager, it's recommended" 84if [ $COUNT -eq 0 ]; then
99 echo "Note: that the tap devices be set as unmanaged in the" 85 exit 0
100 echo "Note: NetworkManager.conf file. Add the following lines to"
101 echo "Note: /etc/NetworkManager/NetworkManager.conf"
102 echo "[keyfile]"
103 echo "unmanaged-devices=interface-name:tap*"
104
105 # The runqemu script will check for this file, and if it exists,
106 # will use the existing bank of tap devices without creating
107 # additional ones via sudo.
108 touch /etc/runqemu-nosudo
109fi 86fi
87
88echo "Creating $COUNT tap devices for GID: $GID..."
89for ((index=0; index < $COUNT; index++)); do
90 echo "Creating $OE_TAP_NAME$index"
91 if ! ifup=`$RUNQEMU_IFUP $GID 2>&1`; then
92 echo "Error bringing up interface: $ifup"
93 exit 1
94 fi
95done
96
97echo "Note: For systems running NetworkManager, it's recommended"
98echo "Note: that the tap devices be set as unmanaged in the"
99echo "Note: NetworkManager.conf file. Add the following lines to"
100echo "Note: /etc/NetworkManager/NetworkManager.conf"
101echo "[keyfile]"
102echo "unmanaged-devices=interface-name:$OE_TAP_NAME*"
103
104# The runqemu script will check for this file, and if it exists,
105# will use the existing bank of tap devices without creating
106# additional ones via sudo.
107touch /etc/runqemu-nosudo
diff --git a/scripts/runqemu-ifdown b/scripts/runqemu-ifdown
index a104c37bf8..822a2a39b9 100755
--- a/scripts/runqemu-ifdown
+++ b/scripts/runqemu-ifdown
@@ -1,8 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# QEMU network configuration script to bring down tap devices. This 3# QEMU network configuration script to bring down tap devices. This
4# utility needs to be run as root, and will use the tunctl binary 4# utility needs to be run as root, and will use the ip utility
5# from the native sysroot.
6# 5#
7# If you find yourself calling this script a lot, you can add the 6# If you find yourself calling this script a lot, you can add the
8# the following to your /etc/sudoers file to be able to run this 7# the following to your /etc/sudoers file to be able to run this
@@ -17,7 +16,7 @@
17# 16#
18 17
19usage() { 18usage() {
20 echo "sudo $(basename $0) <tap-dev> <native-sysroot-basedir>" 19 echo "sudo $(basename $0) <tap-dev>"
21} 20}
22 21
23if [ $EUID -ne 0 ]; then 22if [ $EUID -ne 0 ]; then
@@ -25,30 +24,31 @@ if [ $EUID -ne 0 ]; then
25 exit 1 24 exit 1
26fi 25fi
27 26
28if [ $# -ne 2 ]; then 27if [ $# -gt 2 ] || [ $# -lt 1 ]; then
29 usage 28 usage
30 exit 1 29 exit 1
31fi 30fi
32 31
32# backward compatibility
33if [ $# -eq 2 ] ; then
34 echo "Warning: native-sysroot-basedir parameter is ignored. It is no longer needed." >&2
35fi
36
33TAP=$1 37TAP=$1
34STAGING_BINDIR_NATIVE=$2
35 38
36TUNCTL=$STAGING_BINDIR_NATIVE/tunctl 39if ! ip tuntap del $TAP mode tap 2>/dev/null; then
37if [ ! -e "$TUNCTL" ]; then 40 echo "Error: Unable to run 'ip tuntap del'"
38 echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native"
39 exit 1 41 exit 1
40fi 42fi
41 43
42$TUNCTL -d $TAP 44IPTOOL=`which ip 2> /dev/null`
43 45if [ "x$IPTOOL" = "x" ]; then
44IFCONFIG=`which ip 2> /dev/null`
45if [ "x$IFCONFIG" = "x" ]; then
46 # better than nothing... 46 # better than nothing...
47 IFCONFIG=/sbin/ip 47 IPTOOL=/sbin/ip
48fi 48fi
49if [ -x "$IFCONFIG" ]; then 49if [ -x "$IPTOOL" ]; then
50 if `$IFCONFIG link show $TAP > /dev/null 2>&1`; then 50 if `$IPTOOL link show $TAP > /dev/null 2>&1`; then
51 $IFCONFIG link del $TAP 51 $IPTOOL link del $TAP
52 fi 52 fi
53fi 53fi
54# cleanup the remaining iptables rules 54# cleanup the remaining iptables rules
@@ -60,7 +60,13 @@ if [ ! -x "$IPTABLES" ]; then
60 echo "$IPTABLES cannot be executed" 60 echo "$IPTABLES cannot be executed"
61 exit 1 61 exit 1
62fi 62fi
63n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ] 63
64dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ] 64if [ -z "$OE_TAP_NAME" ]; then
65 OE_TAP_NAME=tap
66fi
67
68n=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 1 ]
69dest=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 2 ]
65$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32 70$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32
66$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32 71$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32
72true
diff --git a/scripts/runqemu-ifup b/scripts/runqemu-ifup
index bb661740c5..05c9325b6b 100755
--- a/scripts/runqemu-ifup
+++ b/scripts/runqemu-ifup
@@ -1,10 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# QEMU network interface configuration script. This utility needs to 3# QEMU network interface configuration script. This utility needs to
4# be run as root, and will use the tunctl binary from a native sysroot. 4# be run as root, and will use the ip utility
5# Note: many Linux distros these days still use an older version of
6# tunctl which does not support the group permissions option, hence
7# the need to use build system's version.
8# 5#
9# If you find yourself calling this script a lot, you can add the 6# If you find yourself calling this script a lot, you can add the
10# the following to your /etc/sudoers file to be able to run this 7# the following to your /etc/sudoers file to be able to run this
@@ -24,7 +21,7 @@
24# 21#
25 22
26usage() { 23usage() {
27 echo "sudo $(basename $0) <uid> <gid> <native-sysroot-basedir>" 24 echo "sudo $(basename $0) <gid>"
28} 25}
29 26
30if [ $EUID -ne 0 ]; then 27if [ $EUID -ne 0 ]; then
@@ -32,41 +29,43 @@ if [ $EUID -ne 0 ]; then
32 exit 1 29 exit 1
33fi 30fi
34 31
35if [ $# -ne 3 ]; then 32if [ $# -eq 2 ]; then
33 echo "Warning: uid parameter is ignored. It is no longer needed." >&2
34 GROUP="$2"
35elif [ $# -eq 1 ]; then
36 GROUP="$1"
37else
36 usage 38 usage
37 exit 1 39 exit 1
38fi 40fi
39 41
40USERID="-u $1"
41GROUP="-g $2"
42STAGING_BINDIR_NATIVE=$3
43 42
44TUNCTL=$STAGING_BINDIR_NATIVE/tunctl 43if [ -z "$OE_TAP_NAME" ]; then
45if [ ! -x "$TUNCTL" ]; then 44 OE_TAP_NAME=tap
46 echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native"
47 exit 1
48fi 45fi
49 46
50TAP=`$TUNCTL -b $GROUP 2>&1` 47if taps=$(ip tuntap list 2>/dev/null); then
51STATUS=$? 48 tap_no_last=$(echo "$taps" |cut -f 1 -d ":" |grep -E "^$OE_TAP_NAME.*" |sed "s/$OE_TAP_NAME//g" | sort -rn | head -n 1)
52if [ $STATUS -ne 0 ]; then 49 if [ -z "$tap_no_last" ]; then
53# If tunctl -g fails, try using tunctl -u, for older host kernels 50 tap_no=0
54# which do not support the TUNSETGROUP ioctl 51 else
55 TAP=`$TUNCTL -b $USERID 2>&1` 52 tap_no=$(("$tap_no_last" + 1))
56 STATUS=$?
57 if [ $STATUS -ne 0 ]; then
58 echo "tunctl failed:"
59 exit 1
60 fi 53 fi
54 ip tuntap add "$OE_TAP_NAME$tap_no" mode tap group "$GROUP" && TAP=$OE_TAP_NAME$tap_no
55fi
56
57if [ -z "$TAP" ]; then
58 echo "Error: Unable to find a tap device to use"
59 exit 1
61fi 60fi
62 61
63IFCONFIG=`which ip 2> /dev/null` 62IPTOOL=`which ip 2> /dev/null`
64if [ "x$IFCONFIG" = "x" ]; then 63if [ "x$IPTOOL" = "x" ]; then
65 # better than nothing... 64 # better than nothing...
66 IFCONFIG=/sbin/ip 65 IPTOOL=/sbin/ip
67fi 66fi
68if [ ! -x "$IFCONFIG" ]; then 67if [ ! -x "$IPTOOL" ]; then
69 echo "$IFCONFIG cannot be executed" 68 echo "$IPTOOL cannot be executed"
70 exit 1 69 exit 1
71fi 70fi
72 71
@@ -79,22 +78,22 @@ if [ ! -x "$IPTABLES" ]; then
79 exit 1 78 exit 1
80fi 79fi
81 80
82n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ] 81n=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 1 ]
83$IFCONFIG addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP 82$IPTOOL addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP
84STATUS=$? 83STATUS=$?
85if [ $STATUS -ne 0 ]; then 84if [ $STATUS -ne 0 ]; then
86 echo "Failed to set up IP addressing on $TAP" 85 echo "Failed to set up IP addressing on $TAP"
87 exit 1 86 exit 1
88fi 87fi
89$IFCONFIG link set dev $TAP up 88$IPTOOL link set dev $TAP up
90STATUS=$? 89STATUS=$?
91if [ $STATUS -ne 0 ]; then 90if [ $STATUS -ne 0 ]; then
92 echo "Failed to bring up $TAP" 91 echo "Failed to bring up $TAP"
93 exit 1 92 exit 1
94fi 93fi
95 94
96dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ] 95dest=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 2 ]
97$IFCONFIG route add to 192.168.7.$dest dev $TAP 96$IPTOOL route add to 192.168.7.$dest dev $TAP
98STATUS=$? 97STATUS=$?
99if [ $STATUS -ne 0 ]; then 98if [ $STATUS -ne 0 ]; then
100 echo "Failed to add route to 192.168.7.$dest using $TAP" 99 echo "Failed to add route to 192.168.7.$dest using $TAP"
diff --git a/scripts/runqemu.README b/scripts/runqemu.README
index da9abd7dfb..e5f4b4634c 100644
--- a/scripts/runqemu.README
+++ b/scripts/runqemu.README
@@ -1,12 +1,12 @@
1Using OE images with QEMU 1Using OE images with QEMU
2========================= 2=========================
3 3
4OE-Core can generate qemu bootable kernels and images with can be used 4OE-Core can generate qemu bootable kernels and images which can be used
5on a desktop system. The scripts currently support booting ARM, MIPS, PowerPC 5on a desktop system. The scripts currently support booting ARM, MIPS, PowerPC
6and x86 (32 and 64 bit) images. The scripts can be used within the OE build 6and x86 (32 and 64 bit) images. The scripts can be used within the OE build
7system or externaly. 7system or externally.
8 8
9The runqemu script is run as: 9The runqemu script is run as:
10 10
11 runqemu <machine> <zimage> <filesystem> 11 runqemu <machine> <zimage> <filesystem>
12 12
@@ -15,13 +15,13 @@ where:
15 <machine> is the machine/architecture to use (qemuarm/qemumips/qemuppc/qemux86/qemux86-64) 15 <machine> is the machine/architecture to use (qemuarm/qemumips/qemuppc/qemux86/qemux86-64)
16 <zimage> is the path to a kernel (e.g. zimage-qemuarm.bin) 16 <zimage> is the path to a kernel (e.g. zimage-qemuarm.bin)
17 <filesystem> is the path to an ext2 image (e.g. filesystem-qemuarm.ext2) or an nfs directory 17 <filesystem> is the path to an ext2 image (e.g. filesystem-qemuarm.ext2) or an nfs directory
18 18
19If <machine> isn't specified, the script will try to detect the machine name 19If <machine> isn't specified, the script will try to detect the machine name
20from the name of the <zimage> file. 20from the name of the <zimage> file.
21 21
22If <filesystem> isn't specified, nfs booting will be assumed. 22If <filesystem> isn't specified, nfs booting will be assumed.
23 23
24When used within the build system, it will default to qemuarm, ext2 and the last kernel and 24When used within the build system, it will default to qemuarm, ext2 and the last kernel and
25core-image-sato-sdk image built by the build system. If an sdk image isn't present it will look 25core-image-sato-sdk image built by the build system. If an sdk image isn't present it will look
26for sato and minimal images. 26for sato and minimal images.
27 27
@@ -31,7 +31,7 @@ Full usage instructions can be seen by running the command with no options speci
31Notes 31Notes
32===== 32=====
33 33
34 - The scripts run qemu using sudo. Change perms on /dev/net/tun to 34 - The scripts run qemu using sudo. Change perms on /dev/net/tun to
35 run as non root. The runqemu-gen-tapdevs script can also be used by 35 run as non root. The runqemu-gen-tapdevs script can also be used by
36 root to prepopulate the appropriate network devices. 36 root to prepopulate the appropriate network devices.
37 - You can access the host computer at 192.168.7.1 within the image. 37 - You can access the host computer at 192.168.7.1 within the image.
diff --git a/scripts/send-error-report b/scripts/send-error-report
index cfbcaa52cb..cc1bc7c2b1 100755
--- a/scripts/send-error-report
+++ b/scripts/send-error-report
@@ -6,6 +6,7 @@
6# Copyright (C) 2013 Intel Corporation 6# Copyright (C) 2013 Intel Corporation
7# Author: Andreea Proca <andreea.b.proca@intel.com> 7# Author: Andreea Proca <andreea.b.proca@intel.com>
8# Author: Michael Wood <michael.g.wood@intel.com> 8# Author: Michael Wood <michael.g.wood@intel.com>
9# Author: Thomas Perrot <thomas.perrot@bootlin.com>
9# 10#
10# SPDX-License-Identifier: GPL-2.0-only 11# SPDX-License-Identifier: GPL-2.0-only
11# 12#
@@ -22,7 +23,7 @@ scripts_lib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'li
22sys.path.insert(0, scripts_lib_path) 23sys.path.insert(0, scripts_lib_path)
23import argparse_oe 24import argparse_oe
24 25
25version = "0.3" 26version = "0.4"
26 27
27log = logging.getLogger("send-error-report") 28log = logging.getLogger("send-error-report")
28logging.basicConfig(format='%(levelname)s: %(message)s') 29logging.basicConfig(format='%(levelname)s: %(message)s')
@@ -65,7 +66,7 @@ def edit_content(json_file_path):
65 66
66def prepare_data(args): 67def prepare_data(args):
67 # attempt to get the max_log_size from the server's settings 68 # attempt to get the max_log_size from the server's settings
68 max_log_size = getPayloadLimit(args.protocol+args.server+"/ClientPost/JSON") 69 max_log_size = getPayloadLimit(args.server+"/ClientPost/JSON")
69 70
70 if not os.path.isfile(args.error_file): 71 if not os.path.isfile(args.error_file):
71 log.error("No data file found.") 72 log.error("No data file found.")
@@ -135,19 +136,38 @@ def send_data(data, args):
135 headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version} 136 headers={'Content-type': 'application/json', 'User-Agent': "send-error-report/"+version}
136 137
137 if args.json: 138 if args.json:
138 url = args.protocol+args.server+"/ClientPost/JSON/" 139 url = args.server+"/ClientPost/JSON/"
139 else: 140 else:
140 url = args.protocol+args.server+"/ClientPost/" 141 url = args.server+"/ClientPost/"
141 142
142 req = urllib.request.Request(url, data=data, headers=headers) 143 req = urllib.request.Request(url, data=data, headers=headers)
144
145 log.debug(f"Request URL: {url}")
146 log.debug(f"Request Headers: {headers}")
147 log.debug(f"Request Data: {data.decode('utf-8')}")
148
143 try: 149 try:
144 response = urllib.request.urlopen(req) 150 response = urllib.request.urlopen(req)
145 except urllib.error.HTTPError as e: 151 except urllib.error.HTTPError as e:
146 logging.error(str(e)) 152 log.error(f"HTTP Error {e.code}: {e.reason}")
153 log.debug(f"Response Content: {e.read().decode('utf-8')}")
147 sys.exit(1) 154 sys.exit(1)
148 155
156 log.debug(f"Response Status: {response.status}")
157 log.debug(f"Response Headers: {response.getheaders()}")
149 print(response.read().decode('utf-8')) 158 print(response.read().decode('utf-8'))
150 159
160def validate_server_url(args):
161 # Get the error report server from an argument
162 server = args.server or 'https://errors.yoctoproject.org'
163
164 if not server.startswith('http://') and not server.startswith('https://'):
165 log.error("Missing a URL scheme either http:// or https:// in the server name: " + server)
166 sys.exit(1)
167
168 # Construct the final URL
169 return f"{server}"
170
151 171
152if __name__ == '__main__': 172if __name__ == '__main__':
153 arg_parse = argparse_oe.ArgumentParser(description="This scripts will send an error report to your specified error-report-web server.") 173 arg_parse = argparse_oe.ArgumentParser(description="This scripts will send an error report to your specified error-report-web server.")
@@ -164,8 +184,7 @@ if __name__ == '__main__':
164 arg_parse.add_argument("-s", 184 arg_parse.add_argument("-s",
165 "--server", 185 "--server",
166 help="Server to send error report to", 186 help="Server to send error report to",
167 type=str, 187 type=str)
168 default="errors.yoctoproject.org")
169 188
170 arg_parse.add_argument("-e", 189 arg_parse.add_argument("-e",
171 "--email", 190 "--email",
@@ -190,18 +209,22 @@ if __name__ == '__main__':
190 help="Return the result in json format, silences all other output", 209 help="Return the result in json format, silences all other output",
191 action="store_true") 210 action="store_true")
192 211
193 arg_parse.add_argument("--no-ssl", 212 arg_parse.add_argument("-d",
194 help="Use http instead of https protocol", 213 "--debug",
195 dest="protocol", 214 help="Enable debug mode to print request/response details",
196 action="store_const", const="http://", default="https://") 215 action="store_true")
197
198
199 216
200 args = arg_parse.parse_args() 217 args = arg_parse.parse_args()
201 218
219 args.server = validate_server_url(args)
220
202 if (args.json == False): 221 if (args.json == False):
203 print("Preparing to send errors to: "+args.server) 222 print("Preparing to send errors to: "+args.server)
204 223
224 # Enable debugging if requested
225 if args.debug:
226 log.setLevel(logging.DEBUG)
227
205 data = prepare_data(args) 228 data = prepare_data(args)
206 send_data(data, args) 229 send_data(data, args)
207 230
diff --git a/scripts/sstate-cache-management.py b/scripts/sstate-cache-management.py
new file mode 100755
index 0000000000..303b8f13a3
--- /dev/null
+++ b/scripts/sstate-cache-management.py
@@ -0,0 +1,336 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import argparse
9import os
10import re
11import sys
12
13from collections import defaultdict
14from concurrent.futures import ThreadPoolExecutor
15from dataclasses import dataclass
16from pathlib import Path
17
18if sys.version_info < (3, 8, 0):
19 raise RuntimeError("Sorry, python 3.8.0 or later is required for this script.")
20
21SSTATE_PREFIX = "sstate:"
22SSTATE_EXTENSION = ".tar.zst"
23# SSTATE_EXTENSION = ".tgz"
24# .siginfo.done files are mentioned in the original script?
25SSTATE_SUFFIXES = (
26 SSTATE_EXTENSION,
27 f"{SSTATE_EXTENSION}.siginfo",
28 f"{SSTATE_EXTENSION}.done",
29)
30
31RE_SSTATE_PKGSPEC = re.compile(
32 rf"""sstate:(?P<pn>[^:]*):
33 (?P<package_target>[^:]*):
34 (?P<pv>[^:]*):
35 (?P<pr>[^:]*):
36 (?P<sstate_pkgarch>[^:]*):
37 (?P<sstate_version>[^_]*):
38 (?P<bb_unihash>[^_]*)_
39 (?P<bb_task>[^:]*)
40 (?P<ext>({"|".join([re.escape(s) for s in SSTATE_SUFFIXES])}))$""",
41 re.X,
42)
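Run against a made-up but representative archive name, the pattern decomposes as follows:

    # Hypothetical archive name, purely illustrative:
    name = ("sstate:zlib:core2-64-poky-linux:1.3.1:r0:core2-64:10:"
            "0123456789abcdef_populate_sysroot.tar.zst")
    m = RE_SSTATE_PKGSPEC.match(name)
    print(m.group("pn"), m.group("bb_task"), m.group("ext"))
    # -> zlib populate_sysroot .tar.zst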
43
44
45# Really we'd like something like a Path subclass which implements a stat
46# cache here, unfortunately there's no good way to do that transparently
47# (yet); see:
48#
49# https://github.com/python/cpython/issues/70219
50# https://discuss.python.org/t/make-pathlib-extensible/3428/77
51@dataclass
52class SstateEntry:
53 """Class for keeping track of an entry in sstate-cache."""
54
55 path: Path
56 match: re.Match
57 stat_result: os.stat_result = None
58
59 def __hash__(self):
60 return self.path.__hash__()
61
62 def __getattr__(self, name):
63 return self.match.group(name)
64
65
66# this is what's in the original script; as far as I can tell, it's an
67# implementation artefact which we don't need?
68def find_archs():
69 # all_archs
70 builder_arch = os.uname().machine
71
72 # FIXME
73 layer_paths = [Path("../..")]
74
75 tune_archs = set()
76 re_tune = re.compile(r'AVAILTUNES .*=.*"(.*)"')
77 for path in layer_paths:
78 for tunefile in [
79 p for p in path.glob("meta*/conf/machine/include/**/*") if p.is_file()
80 ]:
81 with open(tunefile) as f:
82 for line in f:
83 m = re_tune.match(line)
84 if m:
85 tune_archs.update(m.group(1).split())
86
87 # all_machines
88 machine_archs = set()
89 for path in layer_paths:
90 for machine_file in path.glob("meta*/conf/machine/*.conf"):
91 machine_archs.add(machine_file.parts[-1][:-5])
92
93 extra_archs = set()
94 all_archs = (
95 set(
96 arch.replace("-", "_")
97 for arch in machine_archs | tune_archs | set(["allarch", builder_arch])
98 )
99 | extra_archs
100 )
101
102 print(all_archs)
103
104
105# again, not needed?
106def find_tasks():
107 print(set([p.bb_task for p in paths]))
108
109
110def collect_sstate_paths(args):
111 def scandir(path, paths):
112 # Assume everything is a directory; by not checking we avoid needing an
113 # additional stat which is potentially a synchronous roundtrip over NFS
114 try:
115 for p in path.iterdir():
116 filename = p.parts[-1]
117 if filename.startswith(SSTATE_PREFIX):
118 if filename.endswith(SSTATE_SUFFIXES):
119 m = RE_SSTATE_PKGSPEC.match(p.parts[-1])
120 assert m
121 paths.add(SstateEntry(p, m))
122 # ignore other things (includes things like lockfiles)
123 else:
124 scandir(p, paths)
125
126 except NotADirectoryError:
127 pass
128
129 paths = set()
130 # TODO: parallelise scandir
131 scandir(Path(args.cache_dir), paths)
132
133 def path_stat(p):
134 p.stat_result = p.path.lstat()
135
136 if args.remove_duplicated:
137 # This probably costs slightly on a local filesystem, where the GIL
138 # limits the parallelism; over NFS it's a massive win.
139 with ThreadPoolExecutor(max_workers=args.jobs) as executor:
140 executor.map(path_stat, paths)
141
142 return paths
143
144
145def remove_by_stamps(args, paths):
146 all_sums = set()
147 for stamps_dir in args.stamps_dir:
148 stamps_path = Path(stamps_dir)
149 assert stamps_path.is_dir()
150 re_sigdata = re.compile(r"do_.*\.sigdata\.([^.]*)")
151 all_sums |= set(
152 [
153 re_sigdata.search(x.parts[-1]).group(1)
154 for x in stamps_path.glob("*/*/*.do_*.sigdata.*")
155 ]
156 )
157 re_setscene = re.compile(r"do_.*_setscene\.([^.]*)")
158 all_sums |= set(
159 [
160 re_setscene.search(x.parts[-1]).group(1)
161 for x in stamps_path.glob("*/*/*.do_*_setscene.*")
162 ]
163 )
164 return [p for p in paths if p.bb_unihash not in all_sums]
165
166
167def remove_duplicated(args, paths):
168 # Skip populate_lic as it produces duplicates in a normal build
169 #
170 # 9ae16469e707 sstate-cache-management: skip populate_lic archives when removing duplicates
171 valid_paths = [p for p in paths if p.bb_task != "populate_lic"]
172
173 keep = dict()
174 remove = list()
175 for p in valid_paths:
176 sstate_sig = ":".join([p.pn, p.sstate_pkgarch, p.bb_task, p.ext])
177 if sstate_sig not in keep:
178 keep[sstate_sig] = p
179 elif p.stat_result.st_mtime > keep[sstate_sig].stat_result.st_mtime:
180 remove.append(keep[sstate_sig])
181 keep[sstate_sig] = p
182 else:
183 remove.append(p)
184
185 return remove
186
187
188def remove_orphans(args, paths):
189 remove = list()
190 pathsigs = defaultdict(list)
191 for p in paths:
192 sstate_sig = ":".join([p.pn, p.sstate_pkgarch, p.bb_task])
193 pathsigs[sstate_sig].append(p)
194 for k, v in pathsigs.items():
195 if len([p for p in v if p.ext == SSTATE_EXTENSION]) == 0:
196 remove.extend(v)
197 return remove
198
199
200def parse_arguments():
201 parser = argparse.ArgumentParser(description="sstate cache management utility.")
202
203 parser.add_argument(
204 "--cache-dir",
205 default=os.environ.get("SSTATE_CACHE_DIR"),
206 help="""Specify sstate cache directory, will use the environment
207 variable SSTATE_CACHE_DIR if it is not specified.""",
208 )
209
210 # parser.add_argument(
211 # "--extra-archs",
212 # help="""Specify list of architectures which should be tested, this list
213 # will be extended with native arch, allarch and empty arch. The
214 # script won't be trying to generate list of available archs from
215 # AVAILTUNES in tune files.""",
216 # )
217
218 # parser.add_argument(
219 # "--extra-layer",
220 # help="""Specify the layer which will be used for searching the archs,
221 # it will search the meta and meta-* layers in the top dir by
222 # default, and will search meta, meta-*, <layer1>, <layer2>,
223 # ...<layern> when specified. Use "," as the separator.
224 #
225 # This is useless for --stamps-dir or when --extra-archs is used.""",
226 # )
227
228 parser.add_argument(
229 "-d",
230 "--remove-duplicated",
231 action="store_true",
232 help="""Remove the duplicated sstate cache files of one package, only
233 the newest one will be kept. The duplicated sstate cache files
234 of one package must have the same arch, which means sstate cache
235 files with multiple archs are not considered duplicate.
236
237 Conflicts with --stamps-dir.""",
238 )
239
240 parser.add_argument(
241 "--remove-orphans",
242 action="store_true",
243 help=f"""Remove orphan siginfo files from the sstate cache, i.e. those
244 where this is no {SSTATE_EXTENSION} file but there are associated
245 tracking files.""",
246 )
247
248 parser.add_argument(
249 "--stamps-dir",
250 action="append",
251 help="""Specify the build directory's stamps directories, the sstate
252 cache file which IS USED by these build diretories will be KEPT,
253 other sstate cache files in cache-dir will be removed. Can be
254 specified multiple times for several directories.
255
256 Conflicts with --remove-duplicated.""",
257 )
258
259 parser.add_argument(
260 "-j", "--jobs", default=8, type=int, help="Run JOBS jobs in parallel."
261 )
262
263 # parser.add_argument(
264 # "-L",
265 # "--follow-symlink",
266 # action="store_true",
267 # help="Remove both the symbol link and the destination file, default: no.",
268 # )
269
270 parser.add_argument(
271 "-n", "--dry-run", action="store_true", help="Don't execute, just go through the motions."
272 )
273
274 parser.add_argument(
275 "-y",
276 "--yes",
277 action="store_true",
278 help="""Automatic yes to prompts; assume "yes" as answer to all prompts
279 and run non-interactively.""",
280 )
281
282 parser.add_argument(
283 "-v", "--verbose", action="store_true", help="Explain what is being done."
284 )
285
286 parser.add_argument(
287 "-D",
288 "--debug",
289 action="count",
290 default=0,
291 help="Show debug info, repeat for more debug info.",
292 )
293
294 args = parser.parse_args()
295 if args.cache_dir is None or (
296 not args.remove_duplicated and not args.stamps_dir and not args.remove_orphans
297 ):
298 parser.print_usage()
299 sys.exit(1)
300
301 return args
302
303
304def main():
305 args = parse_arguments()
306
307 paths = collect_sstate_paths(args)
308 if args.remove_duplicated:
309 remove = remove_duplicated(args, paths)
310 elif args.stamps_dir:
311 remove = remove_by_stamps(args, paths)
312 else:
313 remove = list()
314
315 if args.remove_orphans:
316 remove = set(remove) | set(remove_orphans(args, paths))
317
318 if args.debug >= 1:
319 print("\n".join([str(p.path) for p in remove]))
320 print(f"{len(remove)} out of {len(paths)} files will be removed!")
321 if args.dry_run:
322 return 0
323
324 if not args.yes:
325 print("Do you want to continue (y/n)?")
326 confirm = input() in ("y", "Y")
327 else:
328 confirm = True
329 if confirm:
330 # TODO: parallelise remove
331 for p in remove:
332 p.path.unlink()
333
334
335if __name__ == "__main__":
336 main()
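
The RE_SSTATE_PKGSPEC regular expression above carries the whole naming scheme of the cache; here is a minimal standalone sketch of how it decomposes one filename (the package name, hash and field values below are invented):

#!/usr/bin/env python3
import re

SSTATE_EXTENSION = ".tar.zst"
SSTATE_SUFFIXES = (
    SSTATE_EXTENSION,
    f"{SSTATE_EXTENSION}.siginfo",
    f"{SSTATE_EXTENSION}.done",
)

RE_SSTATE_PKGSPEC = re.compile(
    rf"""sstate:(?P<pn>[^:]*):
    (?P<package_target>[^:]*):
    (?P<pv>[^:]*):
    (?P<pr>[^:]*):
    (?P<sstate_pkgarch>[^:]*):
    (?P<sstate_version>[^_]*):
    (?P<bb_unihash>[^_]*)_
    (?P<bb_task>[^:]*)
    (?P<ext>({"|".join([re.escape(s) for s in SSTATE_SUFFIXES])}))$""",
    re.X,
)

name = ("sstate:zlib:core2-64-poky-linux:1.3:r0:core2-64:10:"
        "0123456789abcdef_populate_sysroot.tar.zst")
m = RE_SSTATE_PKGSPEC.match(name)
assert m
print(m.group("pn"), m.group("bb_task"), m.group("ext"))
# -> zlib populate_sysroot .tar.zst

# remove_duplicated() keys duplicates on exactly these four fields;
# among entries sharing a key, the one with the newest mtime is kept
key = ":".join(m.group("pn", "sstate_pkgarch", "bb_task", "ext"))
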
diff --git a/scripts/sstate-cache-management.sh b/scripts/sstate-cache-management.sh
deleted file mode 100755
index d39671f7c6..0000000000
--- a/scripts/sstate-cache-management.sh
+++ /dev/null
@@ -1,458 +0,0 @@
1#!/bin/bash
2
3# Copyright (c) 2012 Wind River Systems, Inc.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8# Global vars
9cache_dir=
10confirm=
11fsym=
12total_deleted=0
13verbose=
14debug=0
15
16usage () {
17 cat << EOF
18Welcome to sstate cache management utilities.
19sstate-cache-management.sh <OPTION>
20
21Options:
22 -h, --help
23 Display this help and exit.
24
25 --cache-dir=<sstate cache dir>
26 Specify sstate cache directory, will use the environment
27 variable SSTATE_CACHE_DIR if it is not specified.
28
29 --extra-archs=<arch1>,<arch2>...<archn>
30 Specify list of architectures which should be tested, this list
31 will be extended with native arch, allarch and empty arch. The
32 script won't be trying to generate list of available archs from
33 AVAILTUNES in tune files.
34
35 --extra-layer=<layer1>,<layer2>...<layern>
36 Specify the layer which will be used for searching the archs,
37 it will search the meta and meta-* layers in the top dir by
38 default, and will search meta, meta-*, <layer1>, <layer2>,
39 ...<layern> when specified. Use "," as the separator.
40
41 This is useless for --stamps-dir or when --extra-archs is used.
42
43 -d, --remove-duplicated
44 Remove the duplicated sstate cache files of one package, only
45 the newest one will be kept. The duplicated sstate cache files
46 of one package must have the same arch, which means sstate cache
47 files with multiple archs are not considered duplicate.
48
49 Conflicts with --stamps-dir.
50
51 --stamps-dir=<dir1>,<dir2>...<dirn>
52 Specify the build directory's stamps directories, the sstate
53 cache file which IS USED by these build diretories will be KEPT,
54 other sstate cache files in cache-dir will be removed. Use ","
55 as the separator. For example:
56 --stamps-dir=build1/tmp/stamps,build2/tmp/stamps
57
58 Conflicts with --remove-duplicated.
59
60 -L, --follow-symlink
61 Remove both the symbol link and the destination file, default: no.
62
63 -y, --yes
64 Automatic yes to prompts; assume "yes" as answer to all prompts
65 and run non-interactively.
66
67 -v, --verbose
68 Explain what is being done.
69
70 -D, --debug
71 Show debug info, repeat for more debug info.
72
73EOF
74}
75
76if [ $# -lt 1 ]; then
77 usage
78 exit 0
79fi
80
81# Echo no files to remove
82no_files () {
83 echo No files to remove
84}
85
86# Echo nothing to do
87do_nothing () {
88 echo Nothing to do
89}
90
91# Read the input "y"
92read_confirm () {
93 echo "$total_deleted out of $total_files files will be removed! "
94 if [ "$confirm" != "y" ]; then
95 echo "Do you want to continue (y/n)? "
96 while read confirm; do
97 [ "$confirm" = "Y" -o "$confirm" = "y" -o "$confirm" = "n" \
98 -o "$confirm" = "N" ] && break
99 echo "Invalid input \"$confirm\", please input 'y' or 'n': "
100 done
101 else
102 echo
103 fi
104}
105
106# Print error information and exit.
107echo_error () {
108 echo "ERROR: $1" >&2
109 exit 1
110}
111
112# Generate the remove list:
113#
114# * Add .done/.siginfo to the remove list
115# * Add destination of symlink to the remove list
116#
117# $1: output file, others: sstate cache file (.tar.zst)
118gen_rmlist (){
119 local rmlist_file="$1"
120 shift
121 local files="$@"
122 for i in $files; do
123 echo $i >> $rmlist_file
124 # Add the ".siginfo"
125 if [ -e $i.siginfo ]; then
126 echo $i.siginfo >> $rmlist_file
127 fi
128 # Add the destination of symlink
129 if [ -L "$i" ]; then
130 if [ "$fsym" = "y" ]; then
131 dest="`readlink -e $i`"
132 if [ -n "$dest" ]; then
133 echo $dest >> $rmlist_file
134 # Remove the .siginfo when .tar.zst is removed
135 if [ -f "$dest.siginfo" ]; then
136 echo $dest.siginfo >> $rmlist_file
137 fi
138 fi
139 fi
140 # Add the ".tar.zst.done" and ".siginfo.done" (may exist in the future)
141 base_fn="${i##/*/}"
142 t_fn="$base_fn.done"
143 s_fn="$base_fn.siginfo.done"
144 for d in $t_fn $s_fn; do
145 if [ -f $cache_dir/$d ]; then
146 echo $cache_dir/$d >> $rmlist_file
147 fi
148 done
149 fi
150 done
151}
152
153# Remove the duplicated cache files for the pkg, keep the newest one
154remove_duplicated () {
155
156 local topdir
157 local oe_core_dir
158 local tunedirs
159 local all_archs
160 local all_machines
161 local ava_archs
162 local arch
163 local file_names
164 local sstate_files_list
165 local fn_tmp
166 local list_suffix=`mktemp` || exit 1
167
168 if [ -z "$extra_archs" ] ; then
169 # Find out the archs in all the layers
170 echo "Figuring out the archs in the layers ... "
171 oe_core_dir=$(dirname $(dirname $(readlink -e $0)))
172 topdir=$(dirname $oe_core_dir)
173 tunedirs="`find $topdir/meta* ${oe_core_dir}/meta* $layers -path '*/meta*/conf/machine/include' 2>/dev/null`"
174 [ -n "$tunedirs" ] || echo_error "Can't find the tune directory"
175 all_machines="`find $topdir/meta* ${oe_core_dir}/meta* $layers -path '*/meta*/conf/machine/*' -name '*.conf' 2>/dev/null | sed -e 's/.*\///' -e 's/.conf$//'`"
176 all_archs=`grep -r -h "^AVAILTUNES .*=" $tunedirs | sed -e 's/.*=//' -e 's/\"//g'`
177 fi
178
179 # Use the "_" to substitute "-", e.g., x86-64 to x86_64, but not for extra_archs which can be something like cortexa9t2-vfp-neon
180 # Sort to remove the duplicated ones
181 # Add allarch and builder arch (native)
182 builder_arch=$(uname -m)
183 all_archs="$(echo allarch $all_archs $all_machines $builder_arch \
184 | sed -e 's/-/_/g' -e 's/ /\n/g' | sort -u) $extra_archs"
185 echo "Done"
186
187 # Total number of files including sstate-, .siginfo and .done files
188 total_files=`find $cache_dir -name 'sstate*' | wc -l`
189 # Save all the sstate files in a file
190 sstate_files_list=`mktemp` || exit 1
191 find $cache_dir -iname 'sstate:*:*:*:*:*:*:*.tar.zst*' >$sstate_files_list
192
193 echo "Figuring out the suffixes in the sstate cache dir ... "
194 sstate_suffixes="`sed 's%.*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^_]*_\([^:]*\)\.tar\.zst.*%\1%g' $sstate_files_list | sort -u`"
195 echo "Done"
196 echo "The following suffixes have been found in the cache dir:"
197 echo $sstate_suffixes
198
199 echo "Figuring out the archs in the sstate cache dir ... "
200 # Using this SSTATE_PKGSPEC definition it's 6th colon separated field
201 # SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
202 for arch in $all_archs; do
203 grep -q ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.tar\.zst$" $sstate_files_list
204 [ $? -eq 0 ] && ava_archs="$ava_archs $arch"
205 # ${builder_arch}_$arch used by toolchain sstate
206 grep -q ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:${builder_arch}_$arch:[^:]*:[^:]*\.tar\.zst$" $sstate_files_list
207 [ $? -eq 0 ] && ava_archs="$ava_archs ${builder_arch}_$arch"
208 done
209 echo "Done"
210 echo "The following archs have been found in the cache dir:"
211 echo $ava_archs
212 echo ""
213
214 # Save the file list which needs to be removed
215 local remove_listdir=`mktemp -d` || exit 1
216 for suffix in $sstate_suffixes; do
217 if [ "$suffix" = "populate_lic" ] ; then
218 echo "Skipping populate_lic, because removing duplicates doesn't work correctly for them (use --stamps-dir instead)"
219 continue
220 fi
221 # Total number of files including .siginfo and .done files
222 total_files_suffix=`grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tar\.zst.*" $sstate_files_list | wc -l 2>/dev/null`
223 total_archive_suffix=`grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tar\.zst$" $sstate_files_list | wc -l 2>/dev/null`
224 # Save the file list to a file, some suffix's file may not exist
225 grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tar\.zst.*" $sstate_files_list >$list_suffix 2>/dev/null
226 local deleted_archives=0
227 local deleted_files=0
228 for ext in tar.zst tar.zst.siginfo tar.zst.done; do
229 echo "Figuring out the sstate:xxx_$suffix.$ext ... "
230 # Uniq BPNs
231 file_names=`for arch in $ava_archs ""; do
232 sed -ne "s%.*/sstate:\([^:]*\):[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.${ext}$%\1%p" $list_suffix
233 done | sort -u`
234
235 fn_tmp=`mktemp` || exit 1
236 rm_list="$remove_listdir/sstate:xxx_$suffix"
237 for fn in $file_names; do
238 [ -z "$verbose" ] || echo "Analyzing sstate:$fn-xxx_$suffix.${ext}"
239 for arch in $ava_archs ""; do
240 grep -h ".*/sstate:$fn:[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.${ext}$" $list_suffix >$fn_tmp
241 if [ -s $fn_tmp ] ; then
242 [ $debug -gt 1 ] && echo "Available files for $fn-$arch- with suffix $suffix.${ext}:" && cat $fn_tmp
243 # Use the modification time
244 to_del=$(ls -t $(cat $fn_tmp) | sed -n '1!p')
245 [ $debug -gt 2 ] && echo "Considering to delete: $to_del"
246 # The sstate file which is downloaded from the SSTATE_MIRROR is
247 # put in SSTATE_DIR, and there is a symlink in SSTATE_DIR/??/ to
248 # it, so filter it out from the remove list if it should not be
249 # removed.
250 to_keep=$(ls -t $(cat $fn_tmp) | sed -n '1p')
251 [ $debug -gt 2 ] && echo "Considering to keep: $to_keep"
252 for k in $to_keep; do
253 if [ -L "$k" ]; then
254 # The symlink's destination
255 k_dest="`readlink -e $k`"
256 # Maybe it is the one in cache_dir
257 k_maybe="$cache_dir/${k##/*/}"
258 # Remove it from the remove list if they are the same.
259 if [ "$k_dest" = "$k_maybe" ]; then
260 to_del="`echo $to_del | sed 's#'\"$k_maybe\"'##g'`"
261 fi
262 fi
263 done
264 rm -f $fn_tmp
265 [ $debug -gt 2 ] && echo "Decided to delete: $to_del"
266 gen_rmlist $rm_list.$ext "$to_del"
267 fi
268 done
269 done
270 done
271 deleted_archives=`cat $rm_list.* 2>/dev/null | grep "\.tar\.zst$" | wc -l`
272 deleted_files=`cat $rm_list.* 2>/dev/null | wc -l`
273 [ "$deleted_files" -gt 0 -a $debug -gt 0 ] && cat $rm_list.*
274 echo "($deleted_archives out of $total_archives_suffix .tar.zst files for $suffix suffix will be removed or $deleted_files out of $total_files_suffix when counting also .siginfo and .done files)"
275 let total_deleted=$total_deleted+$deleted_files
276 done
277 deleted_archives=0
278 rm_old_list=$remove_listdir/sstate-old-filenames
279 find $cache_dir -name 'sstate-*.tar.zst' >$rm_old_list
280 [ -s "$rm_old_list" ] && deleted_archives=`cat $rm_old_list | grep "\.tar\.zst$" | wc -l`
281 [ -s "$rm_old_list" ] && deleted_files=`cat $rm_old_list | wc -l`
282 [ -s "$rm_old_list" -a $debug -gt 0 ] && cat $rm_old_list
283 echo "($deleted_archives or .tar.zst files with old sstate-* filenames will be removed or $deleted_files when counting also .siginfo and .done files)"
284 let total_deleted=$total_deleted+$deleted_files
285
286 rm -f $list_suffix
287 rm -f $sstate_files_list
288 if [ $total_deleted -gt 0 ]; then
289 read_confirm
290 if [ "$confirm" = "y" -o "$confirm" = "Y" ]; then
291 for list in `ls $remove_listdir/`; do
292 echo "Removing $list.tar.zst archive (`cat $remove_listdir/$list | wc -w` files) ... "
293 # Remove them one by one to avoid the argument list too long error
294 for i in `cat $remove_listdir/$list`; do
295 rm -f $verbose $i
296 done
297 echo "Done"
298 done
299 echo "$total_deleted files have been removed!"
300 else
301 do_nothing
302 fi
303 else
304 no_files
305 fi
306 [ -d $remove_listdir ] && rm -fr $remove_listdir
307}
308
309# Remove the sstate file by stamps dir, the file not used by the stamps dir
310# will be removed.
311rm_by_stamps (){
312
313 local cache_list=`mktemp` || exit 1
314 local keep_list=`mktemp` || exit 1
315 local rm_list=`mktemp` || exit 1
316 local sums
317 local all_sums
318
319 # Total number of files including sstate-, .siginfo and .done files
320 total_files=`find $cache_dir -type f -name 'sstate*' | wc -l`
321 # Save all the state file list to a file
322 find $cache_dir -type f -name 'sstate*' | sort -u -o $cache_list
323
324 echo "Figuring out the suffixes in the sstate cache dir ... "
325 local sstate_suffixes="`sed 's%.*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^_]*_\([^:]*\)\.tar\.zst.*%\1%g' $cache_list | sort -u`"
326 echo "Done"
327 echo "The following suffixes have been found in the cache dir:"
328 echo $sstate_suffixes
329
330 # Figure out all the md5sums in the stamps dir.
331 echo "Figuring out all the md5sums in stamps dir ... "
332 for i in $sstate_suffixes; do
333 # There is no "\.sigdata" but "_setcene" when it is mirrored
334 # from the SSTATE_MIRRORS, use them to figure out the sum.
335 sums=`find $stamps -maxdepth 3 -name "*.do_$i.*" \
336 -o -name "*.do_${i}_setscene.*" | \
337 sed -ne 's#.*_setscene\.##p' -e 's#.*\.sigdata\.##p' | \
338 sed -e 's#\..*##' | sort -u`
339 all_sums="$all_sums $sums"
340 done
341 echo "Done"
342
343 echo "Figuring out the files which will be removed ... "
344 for i in $all_sums; do
345 grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:${i}_.*" $cache_list >>$keep_list
346 done
347 echo "Done"
348
349 if [ -s $keep_list ]; then
350 sort -u $keep_list -o $keep_list
351 to_del=`comm -1 -3 $keep_list $cache_list`
352 gen_rmlist $rm_list "$to_del"
353 let total_deleted=`cat $rm_list | sort -u | wc -w`
354 if [ $total_deleted -gt 0 ]; then
355 [ $debug -gt 0 ] && cat $rm_list | sort -u
356 read_confirm
357 if [ "$confirm" = "y" -o "$confirm" = "Y" ]; then
358 echo "Removing sstate cache files ... ($total_deleted files)"
359 # Remove them one by one to avoid the argument list too long error
360 for i in `cat $rm_list | sort -u`; do
361 rm -f $verbose $i
362 done
363 echo "$total_deleted files have been removed"
364 else
365 do_nothing
366 fi
367 else
368 no_files
369 fi
370 else
371 echo_error "All files in cache dir will be removed! Abort!"
372 fi
373
374 rm -f $cache_list
375 rm -f $keep_list
376 rm -f $rm_list
377}
378
379# Parse arguments
380while [ -n "$1" ]; do
381 case $1 in
382 --cache-dir=*)
383 cache_dir=`echo $1 | sed -e 's#^--cache-dir=##' | xargs readlink -e`
384 [ -d "$cache_dir" ] || echo_error "Invalid argument to --cache-dir"
385 shift
386 ;;
387 --remove-duplicated|-d)
388 rm_duplicated="y"
389 shift
390 ;;
391 --yes|-y)
392 confirm="y"
393 shift
394 ;;
395 --follow-symlink|-L)
396 fsym="y"
397 shift
398 ;;
399 --extra-archs=*)
400 extra_archs=`echo $1 | sed -e 's#^--extra-archs=##' -e 's#,# #g'`
401 [ -n "$extra_archs" ] || echo_error "Invalid extra arch parameter"
402 shift
403 ;;
404 --extra-layer=*)
405 extra_layers=`echo $1 | sed -e 's#^--extra-layer=##' -e 's#,# #g'`
406 [ -n "$extra_layers" ] || echo_error "Invalid extra layer parameter"
407 for i in $extra_layers; do
408 l=`readlink -e $i`
409 if [ -d "$l" ]; then
410 layers="$layers $l"
411 else
412 echo_error "Can't find layer $i"
413 fi
414 done
415 shift
416 ;;
417 --stamps-dir=*)
418 stamps=`echo $1 | sed -e 's#^--stamps-dir=##' -e 's#,# #g'`
419 [ -n "$stamps" ] || echo_error "Invalid stamps dir $i"
420 for i in $stamps; do
421 [ -d "$i" ] || echo_error "Invalid stamps dir $i"
422 done
423 shift
424 ;;
425 --verbose|-v)
426 verbose="-v"
427 shift
428 ;;
429 --debug|-D)
430 debug=`expr $debug + 1`
431 echo "Debug level $debug"
432 shift
433 ;;
434 --help|-h)
435 usage
436 exit 0
437 ;;
438 *)
439 echo "Invalid arguments $*"
440 echo_error "Try 'sstate-cache-management.sh -h' for more information."
441 ;;
442 esac
443done
444
445# sstate cache directory, use environment variable SSTATE_CACHE_DIR
446# if it was not specified, otherwise, error.
447[ -n "$cache_dir" ] || cache_dir=$SSTATE_CACHE_DIR
448[ -n "$cache_dir" ] || echo_error "No cache dir found!"
449[ -d "$cache_dir" ] || echo_error "Invalid cache directory \"$cache_dir\""
450
451[ -n "$rm_duplicated" -a -n "$stamps" ] && \
452 echo_error "Can not use both --remove-duplicated and --stamps-dir"
453
454[ "$rm_duplicated" = "y" ] && remove_duplicated
455[ -n "$stamps" ] && rm_by_stamps
456[ -z "$rm_duplicated" -a -z "$stamps" ] && \
457 echo "What do you want to do?"
458exit 0
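
Both the removed rm_by_stamps() above and its Python replacement remove_by_stamps() reduce a stamps directory to a set of hashes by pattern-matching stamp filenames; a minimal sketch with invented stamp names:

import re

# Ordinary task stamps embed the hash after ".sigdata."
re_sigdata = re.compile(r"do_.*\.sigdata\.([^.]*)")
stamp = "zlib-1.3-r0.do_compile.sigdata.0123456789abcdef"
print(re_sigdata.search(stamp).group(1))   # -> 0123456789abcdef

# Stamps for tasks restored from a mirror carry the hash after "_setscene."
re_setscene = re.compile(r"do_.*_setscene\.([^.]*)")
stamp = "zlib-1.3-r0.do_populate_sysroot_setscene.0123456789abcdef"
print(re_setscene.search(stamp).group(1))  # -> 0123456789abcdef
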
diff --git a/scripts/sstate-diff-machines.sh b/scripts/sstate-diff-machines.sh
index 8b64e11be1..5ed413b2ee 100755
--- a/scripts/sstate-diff-machines.sh
+++ b/scripts/sstate-diff-machines.sh
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Used to compare sstate checksums between MACHINES. 7# Used to compare sstate checksums between MACHINES.
diff --git a/scripts/sstate-sysroot-cruft.sh b/scripts/sstate-sysroot-cruft.sh
index fbf1ca3c43..b2002badfb 100755
--- a/scripts/sstate-sysroot-cruft.sh
+++ b/scripts/sstate-sysroot-cruft.sh
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Used to find files installed in sysroot which are not tracked by sstate manifest 7# Used to find files installed in sysroot which are not tracked by sstate manifest
@@ -145,18 +147,6 @@ WHITELIST="${WHITELIST} \
145 .*/var/cache/fontconfig/ \ 147 .*/var/cache/fontconfig/ \
146" 148"
147 149
148# created by oe.utils.write_ld_so_conf which is used from few bbclasses and recipes:
149# meta/classes/image-prelink.bbclass: oe.utils.write_ld_so_conf(d)
150# meta/classes/insane.bbclass: oe.utils.write_ld_so_conf(d)
151# meta/classes/insane.bbclass: oe.utils.write_ld_so_conf(d)
152# meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb: oe.utils.write_ld_so_conf(d)
153# meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb: oe.utils.write_ld_so_conf(d)
154# introduced in oe-core commit 7fd1d7e639c2ed7e0699937a5cb245c187b7c811
155# and more visible since added to gobject-introspection in 10e0c1a3a452baa05d160a92a54b2e33cf0fd061
156WHITELIST="${WHITELIST} \
157 [^/]*/etc/ld.so.conf \
158"
159
160SYSROOTS="`readlink -f ${tmpdir}`/sysroots/" 150SYSROOTS="`readlink -f ${tmpdir}`/sysroots/"
161 151
162mkdir ${OUTPUT} 152mkdir ${OUTPUT}
diff --git a/scripts/sysroot-relativelinks.py b/scripts/sysroot-relativelinks.py
index 56e36f3ad5..ccb3c867f0 100755
--- a/scripts/sysroot-relativelinks.py
+++ b/scripts/sysroot-relativelinks.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/task-time b/scripts/task-time
index bcd1e25817..8f71b29b77 100755
--- a/scripts/task-time
+++ b/scripts/task-time
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/test-remote-image b/scripts/test-remote-image
index d209d22854..1d018992b0 100755
--- a/scripts/test-remote-image
+++ b/scripts/test-remote-image
@@ -152,8 +152,7 @@ class AutoTargetProfile(BaseTargetProfile):
152 return controller 152 return controller
153 153
154 def set_kernel_file(self): 154 def set_kernel_file(self):
155 postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" 155 machine = get_bb_var('MACHINE')
156 machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig)
157 self.kernel_file = self.kernel_type + '-' + machine + '.bin' 156 self.kernel_file = self.kernel_type + '-' + machine + '.bin'
158 157
159 def set_rootfs_file(self): 158 def set_rootfs_file(self):
@@ -215,13 +214,11 @@ class PublicAB(BaseRepoProfile):
215 def get_repo_path(self): 214 def get_repo_path(self):
216 path = '/machines/' 215 path = '/machines/'
217 216
218 postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" 217 machine = get_bb_var('MACHINE')
219 machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig)
220 if 'qemu' in machine: 218 if 'qemu' in machine:
221 path += 'qemu/' 219 path += 'qemu/'
222 220
223 postconfig = "QA_GET_DISTRO = \"${DISTRO}\"" 221 distro = get_bb_var('DISTRO')
224 distro = get_bb_var('QA_GET_DISTRO', postconfig=postconfig)
225 path += distro.replace('poky', machine) + '/' 222 path += distro.replace('poky', machine) + '/'
226 return path 223 return path
227 224
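
The change above works because get_bb_var() can read MACHINE and DISTRO directly, so the QA_GET_* postconfig indirection is no longer needed; a minimal sketch of the simplified lookups (assumes an initialized build environment; the example values are invented):

from oeqa.utils.commands import get_bb_var

machine = get_bb_var('MACHINE')   # e.g. "qemux86-64"
distro = get_bb_var('DISTRO')     # e.g. "poky"
kernel_file = 'bzImage' + '-' + machine + '.bin'
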
diff --git a/scripts/verify-bashisms b/scripts/verify-bashisms
index 14d8c298e9..fc3677c6ed 100755
--- a/scripts/verify-bashisms
+++ b/scripts/verify-bashisms
@@ -1,11 +1,13 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
6import sys, os, subprocess, re, shutil 8import sys, os, subprocess, re, shutil
7 9
8whitelist = ( 10allowed = (
9 # type is supported by dash 11 # type is supported by dash
10 'if type systemctl >/dev/null 2>/dev/null; then', 12 'if type systemctl >/dev/null 2>/dev/null; then',
11 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then', 13 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then',
@@ -19,8 +21,8 @@ whitelist = (
19 '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE' 21 '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE'
20 ) 22 )
21 23
22def is_whitelisted(s): 24def is_allowed(s):
23 for w in whitelist: 25 for w in allowed:
24 if w in s: 26 if w in s:
25 return True 27 return True
26 return False 28 return False
@@ -49,7 +51,7 @@ def process(filename, function, lineno, script):
49 output = e.output.replace(fn.name, function) 51 output = e.output.replace(fn.name, function)
50 if not output or not output.startswith('possible bashism'): 52 if not output or not output.startswith('possible bashism'):
51 # Probably starts with or contains only warnings. Dump verbatim 53 # Probably starts with or contains only warnings. Dump verbatim
52 # with one space indention. Can't do the splitting and whitelist 54 # with one space indentation. Can't do the splitting and allowed-list
53 # checking below. 55 # checking below.
54 return '\n'.join([filename, 56 return '\n'.join([filename,
55 ' Unexpected output from checkbashisms.pl'] + 57 ' Unexpected output from checkbashisms.pl'] +
@@ -65,7 +67,7 @@ def process(filename, function, lineno, script):
65 # ... 67 # ...
66 # ... 68 # ...
67 result = [] 69 result = []
68 # Check the results against the whitelist 70 # Check the results against the allowed list
69 for message, source in zip(output[0::2], output[1::2]): 71 for message, source in zip(output[0::2], output[1::2]):
70 if not is_whitelisted(source): 72 if not is_allowed(source):
71 if lineno is not None: 73 if lineno is not None:
diff --git a/scripts/wic b/scripts/wic
index 4bcff8f79c..9137208f5e 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -159,6 +159,9 @@ def wic_create_subcommand(options, usage_str):
159 "(Use -e/--image-name to specify it)") 159 "(Use -e/--image-name to specify it)")
160 native_sysroot = options.native_sysroot 160 native_sysroot = options.native_sysroot
161 161
162 if options.kernel_dir:
163 kernel_dir = options.kernel_dir
164
162 if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)): 165 if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)):
163 logger.info("Building wic-tools...\n") 166 logger.info("Building wic-tools...\n")
164 subprocess.check_call(["bitbake", "wic-tools"]) 167 subprocess.check_call(["bitbake", "wic-tools"])
@@ -206,7 +209,7 @@ def wic_create_subcommand(options, usage_str):
206 logger.info(" (Please check that the build artifacts for the machine") 209 logger.info(" (Please check that the build artifacts for the machine")
207 logger.info(" selected in local.conf actually exist and that they") 210 logger.info(" selected in local.conf actually exist and that they")
208 logger.info(" are the correct artifacts for the image (.wks file)).\n") 211 logger.info(" are the correct artifacts for the image (.wks file)).\n")
209 raise WicError("The artifact that couldn't be found was %s:\n %s", not_found, not_found_dir) 212 raise WicError("The artifact that couldn't be found was %s:\n %s" % (not_found, not_found_dir))
210 213
211 krootfs_dir = options.rootfs_dir 214 krootfs_dir = options.rootfs_dir
212 if krootfs_dir is None: 215 if krootfs_dir is None:
@@ -234,6 +237,13 @@ def wic_ls_subcommand(args, usage_str):
234 Command-line handling for list content of images. 237 Command-line handling for list content of images.
235 The real work is done by engine.wic_ls() 238 The real work is done by engine.wic_ls()
236 """ 239 """
240
241 if args.image_name:
242 BB_VARS.default_image = args.image_name
243
244 if args.vars_dir:
245 BB_VARS.vars_dir = args.vars_dir
246
237 engine.wic_ls(args, args.native_sysroot) 247 engine.wic_ls(args, args.native_sysroot)
238 248
239def wic_cp_subcommand(args, usage_str): 249def wic_cp_subcommand(args, usage_str):
@@ -241,6 +251,12 @@ def wic_cp_subcommand(args, usage_str):
241 Command-line handling for copying files/dirs to images. 251 Command-line handling for copying files/dirs to images.
242 The real work is done by engine.wic_cp() 252 The real work is done by engine.wic_cp()
243 """ 253 """
254 if args.image_name:
255 BB_VARS.default_image = args.image_name
256
257 if args.vars_dir:
258 BB_VARS.vars_dir = args.vars_dir
259
244 engine.wic_cp(args, args.native_sysroot) 260 engine.wic_cp(args, args.native_sysroot)
245 261
246def wic_rm_subcommand(args, usage_str): 262def wic_rm_subcommand(args, usage_str):
@@ -248,6 +264,12 @@ def wic_rm_subcommand(args, usage_str):
248 Command-line handling for removing files/dirs from images. 264 Command-line handling for removing files/dirs from images.
249 The real work is done by engine.wic_rm() 265 The real work is done by engine.wic_rm()
250 """ 266 """
267 if args.image_name:
268 BB_VARS.default_image = args.image_name
269
270 if args.vars_dir:
271 BB_VARS.vars_dir = args.vars_dir
272
251 engine.wic_rm(args, args.native_sysroot) 273 engine.wic_rm(args, args.native_sysroot)
252 274
253def wic_write_subcommand(args, usage_str): 275def wic_write_subcommand(args, usage_str):
@@ -255,6 +277,12 @@ def wic_write_subcommand(args, usage_str):
255 Command-line handling for writing images. 277 Command-line handling for writing images.
256 The real work is done by engine.wic_write() 278 The real work is done by engine.wic_write()
257 """ 279 """
280 if args.image_name:
281 BB_VARS.default_image = args.image_name
282
283 if args.vars_dir:
284 BB_VARS.vars_dir = args.vars_dir
285
258 engine.wic_write(args, args.native_sysroot) 286 engine.wic_write(args, args.native_sysroot)
259 287
260def wic_help_subcommand(args, usage_str): 288def wic_help_subcommand(args, usage_str):
@@ -387,6 +415,12 @@ def wic_init_parser_ls(subparser):
387 help="image spec: <image>[:<vfat partition>[<path>]]") 415 help="image spec: <image>[:<vfat partition>[<path>]]")
388 subparser.add_argument("-n", "--native-sysroot", 416 subparser.add_argument("-n", "--native-sysroot",
389 help="path to the native sysroot containing the tools") 417 help="path to the native sysroot containing the tools")
418 subparser.add_argument("-e", "--image-name", dest="image_name",
419 help="name of the image to use the artifacts from "
420 "e.g. core-image-sato")
421 subparser.add_argument("-v", "--vars", dest='vars_dir',
422 help="directory with <image>.env files that store "
423 "bitbake variables")
390 424
391def imgpathtype(arg): 425def imgpathtype(arg):
392 img = imgtype(arg) 426 img = imgtype(arg)
@@ -401,6 +435,12 @@ def wic_init_parser_cp(subparser):
401 help="image spec: <image>:<vfat partition>[<path>] or <file>") 435 help="image spec: <image>:<vfat partition>[<path>] or <file>")
402 subparser.add_argument("-n", "--native-sysroot", 436 subparser.add_argument("-n", "--native-sysroot",
403 help="path to the native sysroot containing the tools") 437 help="path to the native sysroot containing the tools")
438 subparser.add_argument("-e", "--image-name", dest="image_name",
439 help="name of the image to use the artifacts from "
440 "e.g. core-image-sato")
441 subparser.add_argument("-v", "--vars", dest='vars_dir',
442 help="directory with <image>.env files that store "
443 "bitbake variables")
404 444
405def wic_init_parser_rm(subparser): 445def wic_init_parser_rm(subparser):
406 subparser.add_argument("path", type=imgpathtype, 446 subparser.add_argument("path", type=imgpathtype,
@@ -410,6 +450,12 @@ def wic_init_parser_rm(subparser):
410 subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, 450 subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False,
411 help="remove directories and their contents recursively, " 451 help="remove directories and their contents recursively, "
412 " this only applies to ext* partition") 452 " this only applies to ext* partition")
453 subparser.add_argument("-e", "--image-name", dest="image_name",
454 help="name of the image to use the artifacts from "
455 "e.g. core-image-sato")
456 subparser.add_argument("-v", "--vars", dest='vars_dir',
457 help="directory with <image>.env files that store "
458 "bitbake variables")
413 459
414def expandtype(rules): 460def expandtype(rules):
415 """ 461 """
@@ -451,6 +497,12 @@ def wic_init_parser_write(subparser):
451 help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") 497 help="expand rules: auto or <partition>:<size>[,<partition>:<size>]")
452 subparser.add_argument("-n", "--native-sysroot", 498 subparser.add_argument("-n", "--native-sysroot",
453 help="path to the native sysroot containing the tools") 499 help="path to the native sysroot containing the tools")
500 subparser.add_argument("--image-name", dest="image_name",
501 help="name of the image to use the artifacts from "
502 "e.g. core-image-sato")
503 subparser.add_argument("-v", "--vars", dest='vars_dir',
504 help="directory with <image>.env files that store "
505 "bitbake variables")
454 506
455def wic_init_parser_help(subparser): 507def wic_init_parser_help(subparser):
456 helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) 508 helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage)
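
With --image-name and --vars now threaded into BB_VARS, the ls/cp/rm/write subcommands can resolve artifact locations from saved <image>.env files rather than a live bitbake session; a hypothetical invocation (image name and paths invented):

  wic ls ./core-image-sato-qemux86-64.wic:1 \
      -e core-image-sato \
      --vars ./tmp/deploy/images/qemux86-64 \
      -n ./tmp/work/x86_64-linux/wic-tools/1.0/recipe-sysroot-native
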
diff --git a/scripts/yocto-check-layer b/scripts/yocto-check-layer
index 2445ad5e43..67cc71950f 100755
--- a/scripts/yocto-check-layer
+++ b/scripts/yocto-check-layer
@@ -24,7 +24,7 @@ import scriptpath
24scriptpath.add_oe_lib_path() 24scriptpath.add_oe_lib_path()
25scriptpath.add_bitbake_lib_path() 25scriptpath.add_bitbake_lib_path()
26 26
27from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_layer_dependencies, get_signatures, check_bblayers 27from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_layer_dependencies, get_signatures, check_bblayers, sanity_check_layers
28from oeqa.utils.commands import get_bb_vars 28from oeqa.utils.commands import get_bb_vars
29 29
30PROGNAME = 'yocto-check-layer' 30PROGNAME = 'yocto-check-layer'
@@ -41,6 +41,12 @@ def test_layer(td, layer, test_software_layer_signatures):
41 tc.loadTests(CASES_PATHS) 41 tc.loadTests(CASES_PATHS)
42 return tc.runTests() 42 return tc.runTests()
43 43
44def dump_layer_debug(layer):
45 logger.debug("Found layer %s (%s)" % (layer["name"], layer["path"]))
46 collections = layer.get("collections", {})
47 if collections:
48 logger.debug("%s collections: %s" % (layer["name"], ", ".join(collections)))
49
44def main(): 50def main():
45 parser = argparse.ArgumentParser( 51 parser = argparse.ArgumentParser(
46 description="Yocto Project layer checking tool", 52 description="Yocto Project layer checking tool",
@@ -106,6 +112,17 @@ def main():
106 else: 112 else:
107 dep_layers = layers 113 dep_layers = layers
108 114
115 logger.debug("Found additional layers:")
116 for l in additional_layers:
117 dump_layer_debug(l)
118 logger.debug("Found dependency layers:")
119 for l in dep_layers:
120 dump_layer_debug(l)
121
122 if not sanity_check_layers(additional_layers + dep_layers, logger):
123 logger.error("Failed layer validation")
124 return 1
125
109 logger.info("Detected layers:") 126 logger.info("Detected layers:")
110 for layer in layers: 127 for layer in layers:
111 if layer['type'] == LayerType.ERROR_BSP_DISTRO: 128 if layer['type'] == LayerType.ERROR_BSP_DISTRO:
@@ -151,14 +168,13 @@ def main():
151 168
152 layers_tested = 0 169 layers_tested = 0
153 for layer in layers: 170 for layer in layers:
154 if layer['type'] == LayerType.ERROR_NO_LAYER_CONF or \ 171 if layer['type'] in (LayerType.ERROR_NO_LAYER_CONF, LayerType.ERROR_BSP_DISTRO):
155 layer['type'] == LayerType.ERROR_BSP_DISTRO:
156 continue 172 continue
157 173
158 # Reset to a clean backup copy for each run 174 # Reset to a clean backup copy for each run
159 shutil.copyfile(bblayersconf + '.backup', bblayersconf) 175 shutil.copyfile(bblayersconf + '.backup', bblayersconf)
160 176
161 if check_bblayers(bblayersconf, layer['path'], logger): 177 if layer['type'] not in (LayerType.CORE, ) and check_bblayers(bblayersconf, layer['path'], logger):
162 logger.info("%s already in %s. To capture initial signatures, the layer under test should not be present " 178 logger.info("%s already in %s. To capture initial signatures, the layer under test should not be present "
163 "in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name'])) 179 "in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name']))
164 results[layer['name']] = None 180 results[layer['name']] = None
diff --git a/scripts/yocto_testresults_query.py b/scripts/yocto_testresults_query.py
new file mode 100755
index 0000000000..521ead8473
--- /dev/null
+++ b/scripts/yocto_testresults_query.py
@@ -0,0 +1,131 @@
1#!/usr/bin/env python3
2
3# Yocto Project test results management tool
4# This script is a thin layer over resulttool to manage test results and regression reports.
5# Its main feature is to translate tags or branch names to SHA-1 revisions, and then to run resulttool
6# with those computed revisions
7#
8# Copyright (C) 2023 OpenEmbedded Contributors
9#
10# SPDX-License-Identifier: MIT
11#
12
13import sys
14import os
15import argparse
16import subprocess
17import tempfile
18import lib.scriptutils as scriptutils
19
20script_path = os.path.dirname(os.path.realpath(__file__))
21poky_path = os.path.abspath(os.path.join(script_path, ".."))
22resulttool = os.path.abspath(os.path.join(script_path, "resulttool"))
23logger = scriptutils.logger_create(sys.argv[0])
24testresults_default_url = "git://git.yoctoproject.org/yocto-testresults"
25
26def create_workdir():
27 workdir = tempfile.mkdtemp(prefix='yocto-testresults-query.')
28 logger.info(f"Shallow-cloning testresults in {workdir}")
29 subprocess.check_call(["git", "clone", testresults_default_url, workdir, "--depth", "1"])
30 return workdir
31
32def get_sha1(pokydir, revision):
33 try:
34 rev = subprocess.check_output(["git", "rev-list", "-n", "1", revision], cwd=pokydir).decode('utf-8').strip()
35 logger.info(f"SHA-1 revision for {revision} in {pokydir} is {rev}")
36 return rev
37 except subprocess.CalledProcessError:
38 logger.error(f"Can not find SHA-1 for {revision} in {pokydir}")
39 return None
40
41def get_branch(tag):
42 # The tags in the test results repository, as returned by git ls-remote, have the following form:
43 # refs/tags/<branch>/<count>-g<sha1>/<num>
44 return '/'.join(tag.split("/")[2:-2])
45
46def fetch_testresults(workdir, sha1):
47 logger.info(f"Fetching test results for {sha1} in {workdir}")
48 rawtags = subprocess.check_output(["git", "ls-remote", "--refs", "--tags", "origin", f"*{sha1}*"], cwd=workdir).decode('utf-8').strip()
49 if not rawtags:
50 raise Exception(f"No reference found for commit {sha1} in {workdir}")
51 branch = ""
52 for rev in [rawtag.split()[1] for rawtag in rawtags.splitlines()]:
53 if not branch:
54 branch = get_branch(rev)
55 logger.info(f"Fetching matching revision: {rev}")
56 subprocess.check_call(["git", "fetch", "--depth", "1", "origin", f"{rev}:{rev}"], cwd=workdir)
57 return branch
58
59def compute_regression_report(workdir, basebranch, baserevision, targetbranch, targetrevision, args):
60 logger.info(f"Running resulttool regression between SHA1 {baserevision} and {targetrevision}")
61 command = [resulttool, "regression-git", "--branch", basebranch, "--commit", baserevision, "--branch2", targetbranch, "--commit2", targetrevision, workdir]
62 if args.limit:
63 command.extend(["-l", args.limit])
64 report = subprocess.check_output(command).decode("utf-8")
65 return report
66
67def print_report_with_header(report, baseversion, baserevision, targetversion, targetrevision):
68 print("========================== Regression report ==============================")
69 print(f'{"=> Target:": <16}{targetversion: <16}({targetrevision})')
70 print(f'{"=> Base:": <16}{baseversion: <16}({baserevision})')
71 print("===========================================================================\n")
72 print(report, end='')
73
74def regression(args):
75 logger.info(f"Compute regression report between {args.base} and {args.target}")
76 if args.testresultsdir:
77 workdir = args.testresultsdir
78 else:
79 workdir = create_workdir()
80
81 try:
82 baserevision = get_sha1(poky_path, args.base)
83 targetrevision = get_sha1(poky_path, args.target)
84 if not baserevision or not targetrevision:
85 logger.error("One or more revision(s) missing. You might be targeting nonexistant tags/branches, or are in wrong repository (you must use Poky and not oe-core)")
86 if not args.testresultsdir:
87 subprocess.check_call(["rm", "-rf", workdir])
88 sys.exit(1)
89 basebranch = fetch_testresults(workdir, baserevision)
90 targetbranch = fetch_testresults(workdir, targetrevision)
91 report = compute_regression_report(workdir, basebranch, baserevision, targetbranch, targetrevision, args)
92 print_report_with_header(report, args.base, baserevision, args.target, targetrevision)
93 finally:
94 if not args.testresultsdir:
95 subprocess.check_call(["rm", "-rf", workdir])
96
97def main():
98 parser = argparse.ArgumentParser(description="Yocto Project test results helper")
99 subparsers = parser.add_subparsers(
100 help="Supported commands for test results helper",
101 required=True)
102 parser_regression_report = subparsers.add_parser(
103 "regression-report",
104 help="Generate regression report between two fixed revisions. Revisions can be branch name or tag")
105 parser_regression_report.add_argument(
106 'base',
107 help="Revision or tag against which to compare results (i.e: the older)")
108 parser_regression_report.add_argument(
109 'target',
110 help="Revision or tag to compare against the base (i.e: the newer)")
111 parser_regression_report.add_argument(
112 '-t',
113 '--testresultsdir',
114 help=f"An existing test results directory. {sys.argv[0]} will automatically clone it and use default branch if not provided")
115 parser_regression_report.add_argument(
116 '-l',
117 '--limit',
118 help=f"Maximum number of changes to display per test. Can be set to 0 to print all changes")
119 parser_regression_report.set_defaults(func=regression)
120
121 args = parser.parse_args()
122 args.func(args)
123
124if __name__ == '__main__':
125 try:
126 ret = main()
127 except Exception:
128 ret = 1
129 import traceback
130 traceback.print_exc()
131 sys.exit(ret)
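
The slicing in get_branch() keeps every path component between the refs/tags/ prefix and the trailing <count>-g<sha1>/<num> pair, so branch names that themselves contain slashes survive; a quick sketch with invented tags:

def get_branch(tag):
    # refs/tags/<branch>/<count>-g<sha1>/<num>
    return '/'.join(tag.split("/")[2:-2])

print(get_branch("refs/tags/kirkstone/42-gdeadbeef/1"))
# -> kirkstone
print(get_branch("refs/tags/abelloni/master-next/42-gdeadbeef/1"))
# -> abelloni/master-next
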