Diffstat (limited to 'scripts')
-rw-r--r--  scripts/.oe-layers.json | 7
-rwxr-xr-x  scripts/autobuilder-worker-prereq-tests | 28
-rwxr-xr-x  scripts/bblock | 184
-rwxr-xr-x  scripts/bitbake-prserv-tool | 9
-rwxr-xr-x  scripts/bitbake-whatchanged | 320
-rwxr-xr-x  scripts/buildhistory-collect-srcrevs | 6
-rwxr-xr-x  scripts/buildhistory-diff | 5
-rwxr-xr-x  scripts/buildstats-diff | 2
-rwxr-xr-x  scripts/buildstats-summary | 126
-rwxr-xr-x  scripts/combo-layer | 38
-rwxr-xr-x  scripts/combo-layer-hook-default.sh | 2
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix-plot.sh | 4
-rwxr-xr-x  scripts/contrib/bbvars.py | 6
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh | 15
-rwxr-xr-x  scripts/contrib/convert-overrides.py | 155
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py | 145
-rwxr-xr-x  scripts/contrib/convert-srcuri.py | 77
-rwxr-xr-x  scripts/contrib/convert-variable-renames.py | 116
-rwxr-xr-x  scripts/contrib/ddimage | 2
-rwxr-xr-x  scripts/contrib/dialog-power-control | 2
-rwxr-xr-x  scripts/contrib/documentation-audit.sh | 6
-rwxr-xr-x  scripts/contrib/image-manifest | 523
-rwxr-xr-x  scripts/contrib/list-packageconfig-flags.py | 2
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py | 167
-rwxr-xr-x  scripts/contrib/patchreview.py | 71
-rwxr-xr-x  scripts/contrib/test_build_time.sh | 2
-rwxr-xr-x  scripts/contrib/test_build_time_worker.sh | 2
-rwxr-xr-x  scripts/contrib/verify-homepage.py | 2
-rwxr-xr-x  scripts/cp-noerror | 2
-rwxr-xr-x  scripts/create-pull-request | 9
l---------  scripts/cross-intercept/ar | 1
-rwxr-xr-x  scripts/crosstap | 8
-rwxr-xr-x  scripts/devtool | 34
l---------  scripts/esdk-tools/devtool | 1
l---------  scripts/esdk-tools/oe-find-native-sysroot | 1
l---------  scripts/esdk-tools/recipetool | 1
l---------  scripts/esdk-tools/runqemu | 1
l---------  scripts/esdk-tools/runqemu-addptable2image | 1
l---------  scripts/esdk-tools/runqemu-export-rootfs | 1
l---------  scripts/esdk-tools/runqemu-extract-sdk | 1
l---------  scripts/esdk-tools/runqemu-gen-tapdevs | 1
l---------  scripts/esdk-tools/runqemu-ifdown | 1
l---------  scripts/esdk-tools/runqemu-ifup | 1
l---------  scripts/esdk-tools/wic | 1
-rwxr-xr-x  scripts/gen-lockedsig-cache | 6
-rwxr-xr-x  scripts/git | 30
-rwxr-xr-x  scripts/install-buildtools | 22
-rw-r--r--  scripts/lib/argparse_oe.py | 2
-rw-r--r--  scripts/lib/build_perf/report.py | 3
-rw-r--r--  scripts/lib/buildstats.py | 38
-rw-r--r--  scripts/lib/checklayer/__init__.py | 58
-rw-r--r--  scripts/lib/checklayer/cases/bsp.py | 4
-rw-r--r--  scripts/lib/checklayer/cases/common.py | 46
-rw-r--r--  scripts/lib/checklayer/cases/distro.py | 2
-rw-r--r--  scripts/lib/devtool/__init__.py | 27
-rw-r--r--  scripts/lib/devtool/build_image.py | 2
-rw-r--r--  scripts/lib/devtool/build_sdk.py | 2
-rw-r--r--  scripts/lib/devtool/deploy.py | 240
-rw-r--r--  scripts/lib/devtool/ide_plugins/__init__.py | 282
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py | 463
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_none.py | 53
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py | 1070
-rw-r--r--  scripts/lib/devtool/menuconfig.py | 4
-rw-r--r--  scripts/lib/devtool/sdk.py | 5
-rw-r--r--  scripts/lib/devtool/search.py | 5
-rw-r--r--  scripts/lib/devtool/standard.py | 540
-rw-r--r--  scripts/lib/devtool/upgrade.py | 196
-rw-r--r--  scripts/lib/recipetool/append.py | 78
-rw-r--r--  scripts/lib/recipetool/create.py | 360
-rw-r--r--  scripts/lib/recipetool/create_buildsys.py | 43
-rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py | 1100
-rw-r--r--  scripts/lib/recipetool/create_go.py | 777
-rw-r--r--  scripts/lib/recipetool/create_kmod.py | 2
-rw-r--r--  scripts/lib/recipetool/create_npm.py | 103
-rw-r--r--  scripts/lib/recipetool/licenses.csv | 37
-rw-r--r--  scripts/lib/recipetool/setvar.py | 1
-rw-r--r--  scripts/lib/resulttool/log.py | 13
-rw-r--r--  scripts/lib/resulttool/regression.py | 281
-rw-r--r--  scripts/lib/resulttool/report.py | 5
-rw-r--r--  scripts/lib/resulttool/resultutils.py | 8
-rw-r--r--  scripts/lib/scriptutils.py | 25
-rw-r--r--  scripts/lib/wic/canned-wks/common.wks.inc | 2
-rw-r--r--  scripts/lib/wic/canned-wks/directdisk-gpt.wks | 2
-rw-r--r--  scripts/lib/wic/canned-wks/efi-bootdisk.wks.in | 2
-rw-r--r--  scripts/lib/wic/canned-wks/mkefidisk.wks | 2
-rw-r--r--  scripts/lib/wic/canned-wks/qemuloongarch.wks | 3
-rw-r--r--  scripts/lib/wic/canned-wks/qemux86-directdisk.wks | 2
-rw-r--r--  scripts/lib/wic/engine.py | 6
-rw-r--r--  scripts/lib/wic/filemap.py | 7
-rw-r--r--  scripts/lib/wic/help.py | 18
-rw-r--r--  scripts/lib/wic/ksparser.py | 13
-rw-r--r--  scripts/lib/wic/misc.py | 14
-rw-r--r--  scripts/lib/wic/partition.py | 73
-rw-r--r--  scripts/lib/wic/pluginbase.py | 8
-rw-r--r--  scripts/lib/wic/plugins/imager/direct.py | 134
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-efi.py | 211
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-partition.py | 9
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-pcbios.py | 12
-rw-r--r--  scripts/lib/wic/plugins/source/empty.py | 59
-rw-r--r--  scripts/lib/wic/plugins/source/isoimage-isohybrid.py | 2
-rw-r--r--  scripts/lib/wic/plugins/source/rawcopy.py | 42
-rw-r--r--  scripts/lib/wic/plugins/source/rootfs.py | 13
-rwxr-xr-x  scripts/lnr | 24
-rwxr-xr-x  scripts/native-intercept/ar | 32
-rwxr-xr-x  scripts/nativesdk-intercept/chgrp | 30
-rwxr-xr-x  scripts/nativesdk-intercept/chown | 30
-rwxr-xr-x  scripts/oe-buildenv-internal | 35
-rwxr-xr-x  scripts/oe-check-sstate | 14
-rwxr-xr-x  scripts/oe-debuginfod | 30
-rwxr-xr-x  scripts/oe-depends-dot | 34
-rwxr-xr-x  scripts/oe-find-native-sysroot | 15
-rwxr-xr-x  scripts/oe-gnome-terminal-phonehome | 2
-rwxr-xr-x  scripts/oe-pkgdata-browser | 13
-rwxr-xr-x  scripts/oe-pkgdata-util | 47
-rwxr-xr-x  scripts/oe-publish-sdk | 4
-rwxr-xr-x  scripts/oe-pylint | 2
-rwxr-xr-x  scripts/oe-setup-build | 122
-rwxr-xr-x  scripts/oe-setup-builddir | 106
-rwxr-xr-x  scripts/oe-setup-layers | 146
-rwxr-xr-x  scripts/oe-setup-vscode | 93
-rwxr-xr-x  scripts/oe-time-dd-test.sh | 106
-rwxr-xr-x  scripts/oe-trim-schemas | 2
-rwxr-xr-x  scripts/oepydevshell-internal.py | 4
-rwxr-xr-x  scripts/opkg-query-helper.py | 2
-rwxr-xr-x  scripts/patchtest | 232
-rwxr-xr-x  scripts/patchtest-get-branch | 81
-rwxr-xr-x  scripts/patchtest-get-series | 115
-rwxr-xr-x  scripts/patchtest-send-results | 110
-rwxr-xr-x  scripts/patchtest-setup-sharedir | 83
-rw-r--r--  scripts/patchtest.README | 153
-rw-r--r--  scripts/postinst-intercepts/update_gtk_icon_cache | 6
-rw-r--r--  scripts/postinst-intercepts/update_mandb | 18
-rw-r--r--  scripts/postinst-intercepts/update_udev_hwdb | 5
-rw-r--r--  scripts/pybootchartgui/pybootchartgui/draw.py | 158
-rw-r--r--  scripts/pybootchartgui/pybootchartgui/parsing.py | 35
-rw-r--r--  scripts/pybootchartgui/pybootchartgui/samples.py | 25
-rwxr-xr-x  scripts/pythondeps | 2
-rwxr-xr-x  scripts/relocate_sdk.py | 35
-rwxr-xr-x  scripts/rpm2cpio.sh | 30
-rwxr-xr-x  scripts/runqemu | 625
-rwxr-xr-x  scripts/runqemu-addptable2image | 2
-rwxr-xr-x  scripts/runqemu-export-rootfs | 25
-rwxr-xr-x  scripts/runqemu-extract-sdk | 2
-rwxr-xr-x  scripts/runqemu-gen-tapdevs | 120
-rwxr-xr-x  scripts/runqemu-ifdown | 42
-rwxr-xr-x  scripts/runqemu-ifup | 65
-rw-r--r--  scripts/runqemu.README | 16
-rwxr-xr-x  scripts/sstate-cache-management.py | 329
-rwxr-xr-x  scripts/sstate-cache-management.sh | 458
-rwxr-xr-x  scripts/sstate-diff-machines.sh | 6
-rwxr-xr-x  scripts/sstate-sysroot-cruft.sh | 14
-rwxr-xr-x  scripts/sysroot-relativelinks.py | 2
-rwxr-xr-x  scripts/task-time | 2
-rwxr-xr-x  scripts/verify-bashisms | 14
-rwxr-xr-x  scripts/wic | 11
-rwxr-xr-x  scripts/yocto-check-layer | 52
-rwxr-xr-x  scripts/yocto_testresults_query.py | 131
157 files changed, 9856 insertions, 2697 deletions
diff --git a/scripts/.oe-layers.json b/scripts/.oe-layers.json
new file mode 100644
index 0000000000..1b00a84b54
--- /dev/null
+++ b/scripts/.oe-layers.json
@@ -0,0 +1,7 @@
+{
+    "layers": [
+        "../meta-poky",
+        "../meta"
+    ],
+    "version": "1.0"
+}
diff --git a/scripts/autobuilder-worker-prereq-tests b/scripts/autobuilder-worker-prereq-tests
index 5d7e6e2601..54fd3c1004 100755
--- a/scripts/autobuilder-worker-prereq-tests
+++ b/scripts/autobuilder-worker-prereq-tests
@@ -1,5 +1,7 @@
 #!/bin/bash
 #
+# Copyright OpenEmbedded Contributors
+#
 # Script which can be run on new autobuilder workers to check all needed configuration is present.
 # Designed to be run in a repo where bitbake/oe-core are already present.
 #
@@ -35,6 +37,11 @@ if [ "$?" != "0" ]; then
     echo "Please set git config --global user.email"
     exit 1
 fi
+python3 -c "import jinja2"
+if [ "$?" != "0" ]; then
+    echo "Please ensure jinja2 is available"
+    exit 1
+fi
 bitbake -p
 if [ "$?" != "0" ]; then
     echo "Bitbake parsing failed"
@@ -46,16 +53,31 @@ if (( $WATCHES < 65000 )); then
     echo 'Need to increase watches (echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf'
     exit 1
 fi
+OPEN_FILES=$(ulimit -n)
+if (( $OPEN_FILES < 65535 )); then
+    echo 'Increase maximum open files in /etc/security/limits.conf'
+    echo '* soft nofile 131072'
+    echo '* hard nofile 131072'
+    exit 1
+fi
+MAX_PROCESSES=$(ulimit -u)
+if (( $MAX_PROCESSES < 514542 )); then
+    echo 'Increase maximum user processes in /etc/security/limits.conf'
+    echo '* hard nproc 515294'
+    echo '* soft nproc 514543'
+    exit 1
+fi
+
 mkdir -p tmp/deploy/images/qemux86-64
 pushd tmp/deploy/images/qemux86-64
 if [ ! -e core-image-minimal-qemux86-64.ext4 ]; then
-    wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4
+    wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4
 fi
 if [ ! -e core-image-minimal-qemux86-64.qemuboot.conf ]; then
-    wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf
+    wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf
 fi
 if [ ! -e bzImage-qemux86-64.bin ]; then
-    wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/bzImage-qemux86-64.bin
+    wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/bzImage-qemux86-64.bin
 fi
 popd
 bitbake qemu-helper-native
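
Note: the new ulimit checks above only detect low limits; they do not raise them. A minimal sketch of how a worker admin might apply the values the script asks for (values taken from the script's own messages; assumes a pam_limits-based distro where /etc/security/limits.conf is honoured):

    # Raise the per-user limits the script checks for (hypothetical admin step)
    $ printf '%s\n' '* soft nofile 131072' '* hard nofile 131072' \
          '* hard nproc 515294' '* soft nproc 514543' | sudo tee -a /etc/security/limits.conf
    # And the inotify watch limit from the earlier check
    $ echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
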
diff --git a/scripts/bblock b/scripts/bblock
new file mode 100755
index 0000000000..0082059af8
--- /dev/null
+++ b/scripts/bblock
@@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+# bblock
+# lock/unlock task to latest signature
+#
+# Copyright (c) 2023 BayLibre, SAS
+# Author: Julien Stepahn <jstephan@baylibre.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import sys
+import logging
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + "/lib"
+sys.path = sys.path + [lib_path]
+
+import scriptpath
+
+scriptpath.add_bitbake_lib_path()
+
+import bb.tinfoil
+import bb.msg
+
+import argparse_oe
+
+myname = os.path.basename(sys.argv[0])
+logger = bb.msg.logger_create(myname)
+
+
+def getTaskSignatures(tinfoil, pn, tasks):
+    tinfoil.set_event_mask(
+        [
+            "bb.event.GetTaskSignatureResult",
+            "logging.LogRecord",
+            "bb.command.CommandCompleted",
+            "bb.command.CommandFailed",
+        ]
+    )
+    ret = tinfoil.run_command("getTaskSignatures", pn, tasks)
+    if ret:
+        while True:
+            event = tinfoil.wait_event(1)
+            if event:
+                if isinstance(event, bb.command.CommandCompleted):
+                    break
+                elif isinstance(event, bb.command.CommandFailed):
+                    logger.error(str(event))
+                    sys.exit(2)
+                elif isinstance(event, bb.event.GetTaskSignatureResult):
+                    sig = event.sig
+                elif isinstance(event, logging.LogRecord):
+                    logger.handle(event)
+    else:
+        logger.error("No result returned from getTaskSignatures command")
+        sys.exit(2)
+    return sig
+
+
+def parseRecipe(tinfoil, recipe):
+    try:
+        tinfoil.parse_recipes()
+        d = tinfoil.parse_recipe(recipe)
+    except Exception:
+        logger.error("Failed to get recipe info for: %s" % recipe)
+        sys.exit(1)
+    return d
+
+
+def bblockDump(lockfile):
+    try:
+        with open(lockfile, "r") as lockfile:
+            for line in lockfile:
+                print(line.strip())
+    except IOError:
+        return 1
+    return 0
+
+
+def bblockReset(lockfile, pns, package_archs, tasks):
+    if not pns:
+        logger.info("Unlocking all recipes")
+        try:
+            os.remove(lockfile)
+        except FileNotFoundError:
+            pass
+    else:
+        logger.info("Unlocking {pns}".format(pns=pns))
+        tmp_lockfile = lockfile + ".tmp"
+        with open(lockfile, "r") as infile, open(tmp_lockfile, "w") as outfile:
+            for line in infile:
+                if not (
+                    any(element in line for element in pns)
+                    and any(element in line for element in package_archs.split())
+                ):
+                    outfile.write(line)
+                else:
+                    if tasks and not any(element in line for element in tasks):
+                        outfile.write(line)
+        os.remove(lockfile)
+        os.rename(tmp_lockfile, lockfile)
+
+
+def main():
+    parser = argparse_oe.ArgumentParser(description="Lock and unlock a recipe")
+    parser.add_argument("pn", nargs="*", help="Space separated list of recipe to lock")
+    parser.add_argument(
+        "-t",
+        "--tasks",
+        help="Comma separated list of tasks",
+        type=lambda s: [
+            task if task.startswith("do_") else "do_" + task for task in s.split(",")
+        ],
+    )
+    parser.add_argument(
+        "-r",
+        "--reset",
+        action="store_true",
+        help="Unlock pn recipes, or all recipes if pn is empty",
+    )
+    parser.add_argument(
+        "-d",
+        "--dump",
+        action="store_true",
+        help="Dump generated bblock.conf file",
+    )
+
+    global_args, unparsed_args = parser.parse_known_args()
+
+    with bb.tinfoil.Tinfoil() as tinfoil:
+        tinfoil.prepare(config_only=True)
+
+        package_archs = tinfoil.config_data.getVar("PACKAGE_ARCHS")
+        builddir = tinfoil.config_data.getVar("TOPDIR")
+        lockfile = "{builddir}/conf/bblock.conf".format(builddir=builddir)
+
+        if global_args.dump:
+            bblockDump(lockfile)
+            return 0
+
+        if global_args.reset:
+            bblockReset(lockfile, global_args.pn, package_archs, global_args.tasks)
+            return 0
+
+        with open(lockfile, "a") as lockfile:
+            s = ""
+            if lockfile.tell() == 0:
+                s = "# Generated by bblock\n"
+                s += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "info"\n'
+                s += 'SIGGEN_LOCKEDSIGS_TYPES += "${PACKAGE_ARCHS}"\n'
+                s += "\n"
+
+            for pn in global_args.pn:
+                d = parseRecipe(tinfoil, pn)
+                package_arch = d.getVar("PACKAGE_ARCH")
+                siggen_locked_sigs_package_arch = d.getVar(
+                    "SIGGEN_LOCKEDSIGS_{package_arch}".format(package_arch=package_arch)
+                )
+                sigs = getTaskSignatures(tinfoil, [pn], global_args.tasks)
+                for sig in sigs:
+                    new_entry = "{pn}:{taskname}:{sig}".format(
+                        pn=sig[0], taskname=sig[1], sig=sig[2]
+                    )
+                    if (
+                        siggen_locked_sigs_package_arch
+                        and not new_entry in siggen_locked_sigs_package_arch
+                    ) or not siggen_locked_sigs_package_arch:
+                        s += 'SIGGEN_LOCKEDSIGS_{package_arch} += "{new_entry}"\n'.format(
+                            package_arch=package_arch, new_entry=new_entry
+                        )
+            lockfile.write(s)
+    return 0
+
+
+if __name__ == "__main__":
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+
+        traceback.print_exc()
+    sys.exit(ret)
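
A minimal usage sketch of the new bblock script, run from an initialised build directory (the recipe and task names are illustrative):

    # Lock all tasks of a recipe to their current signatures
    $ bblock curl
    # Lock only do_compile and do_install of two recipes
    $ bblock -t compile,install curl openssl
    # Inspect the generated conf/bblock.conf, then unlock everything again
    $ bblock -d
    $ bblock -r
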
diff --git a/scripts/bitbake-prserv-tool b/scripts/bitbake-prserv-tool
index e55d98c72e..80028342b1 100755
--- a/scripts/bitbake-prserv-tool
+++ b/scripts/bitbake-prserv-tool
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -15,8 +17,11 @@ help ()
 clean_cache()
 {
     s=`bitbake -e | grep ^CACHE= | cut -f2 -d\"`
+    # Stop any active memory resident server
+    bitbake -m
+    # Remove cache entries since we want to trigger a full reparse
     if [ "x${s}" != "x" ]; then
-        rm -rf ${s}
+        rm -f ${s}/bb_cache*.dat.*
     fi
 }
 
@@ -60,7 +65,7 @@ do_migrate_localcount ()
         return 1
     fi
 
-    rm -rf $df
+    rm -f $df
     clean_cache
     echo "Exporting LOCALCOUNT to AUTOINCs..."
     bitbake -R conf/migrate_localcount.conf -p
diff --git a/scripts/bitbake-whatchanged b/scripts/bitbake-whatchanged
deleted file mode 100755
index 3095dafa46..0000000000
--- a/scripts/bitbake-whatchanged
+++ /dev/null
@@ -1,320 +0,0 @@
-#!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-
-# Copyright (c) 2013 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-import os
-import sys
-import getopt
-import shutil
-import re
-import warnings
-import subprocess
-import argparse
-
-scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
-lib_path = scripts_path + '/lib'
-sys.path = sys.path + [lib_path]
-
-import scriptpath
-
-# Figure out where is the bitbake/lib/bb since we need bb.siggen and bb.process
-bitbakepath = scriptpath.add_bitbake_lib_path()
-if not bitbakepath:
-    sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
-    sys.exit(1)
-scriptpath.add_oe_lib_path()
-import argparse_oe
-
-import bb.siggen
-import bb.process
-
-# Match the stamp's filename
-# group(1): PE_PV (may no PE)
-# group(2): PR
-# group(3): TASK
-# group(4): HASH
-stamp_re = re.compile("(?P<pv>.*)-(?P<pr>r\d+)\.(?P<task>do_\w+)\.(?P<hash>[^\.]*)")
-sigdata_re = re.compile(".*\.sigdata\..*")
-
-def gen_dict(stamps):
-    """
-    Generate the dict from the stamps dir.
-    The output dict format is:
-    {fake_f: {pn: PN, pv: PV, pr: PR, task: TASK, path: PATH}}
-    Where:
-    fake_f: pv + task + hash
-    path: the path to the stamp file
-    """
-    # The member of the sub dict (A "path" will be appended below)
-    sub_mem = ("pv", "pr", "task")
-    d = {}
-    for dirpath, _, files in os.walk(stamps):
-        for f in files:
-            # The "bitbake -S" would generate ".sigdata", but no "_setscene".
-            fake_f = re.sub('_setscene.', '.', f)
-            fake_f = re.sub('.sigdata', '', fake_f)
-            subdict = {}
-            tmp = stamp_re.match(fake_f)
-            if tmp:
-                for i in sub_mem:
-                    subdict[i] = tmp.group(i)
-            if len(subdict) != 0:
-                pn = os.path.basename(dirpath)
-                subdict['pn'] = pn
-                # The path will be used by os.stat() and bb.siggen
-                subdict['path'] = dirpath + "/" + f
-                fake_f = tmp.group('pv') + tmp.group('task') + tmp.group('hash')
-                d[fake_f] = subdict
-    return d
-
-# Re-construct the dict
-def recon_dict(dict_in):
-    """
-    The output dict format is:
-    {pn_task: {pv: PV, pr: PR, path: PATH}}
-    """
-    dict_out = {}
-    for k in dict_in.keys():
-        subdict = {}
-        # The key
-        pn_task = "%s_%s" % (dict_in.get(k).get('pn'), dict_in.get(k).get('task'))
-        # If more than one stamps are found, use the latest one.
-        if pn_task in dict_out:
-            full_path_pre = dict_out.get(pn_task).get('path')
-            full_path_cur = dict_in.get(k).get('path')
-            if os.stat(full_path_pre).st_mtime > os.stat(full_path_cur).st_mtime:
-                continue
-        subdict['pv'] = dict_in.get(k).get('pv')
-        subdict['pr'] = dict_in.get(k).get('pr')
-        subdict['path'] = dict_in.get(k).get('path')
-        dict_out[pn_task] = subdict
-
-    return dict_out
-
-def split_pntask(s):
-    """
-    Split the pn_task in to (pn, task) and return it
-    """
-    tmp = re.match("(.*)_(do_.*)", s)
-    return (tmp.group(1), tmp.group(2))
-
-
-def print_added(d_new = None, d_old = None):
-    """
-    Print the newly added tasks
-    """
-    added = {}
-    for k in list(d_new.keys()):
-        if k not in d_old:
-            # Add the new one to added dict, and remove it from
-            # d_new, so the remaining ones are the changed ones
-            added[k] = d_new.get(k)
-            del(d_new[k])
-
-    if not added:
-        return 0
-
-    # Format the output, the dict format is:
-    # {pn: task1, task2 ...}
-    added_format = {}
-    counter = 0
-    for k in added.keys():
-        pn, task = split_pntask(k)
-        if pn in added_format:
-            # Append the value
-            added_format[pn] = "%s %s" % (added_format.get(pn), task)
-        else:
-            added_format[pn] = task
-        counter += 1
-    print("=== Newly added tasks: (%s tasks)" % counter)
-    for k in added_format.keys():
-        print("  %s: %s" % (k, added_format.get(k)))
-
-    return counter
-
-def print_vrchanged(d_new = None, d_old = None, vr = None):
-    """
-    Print the pv or pr changed tasks.
-    The arg "vr" is "pv" or "pr"
-    """
-    pvchanged = {}
-    counter = 0
-    for k in list(d_new.keys()):
-        if d_new.get(k).get(vr) != d_old.get(k).get(vr):
-            counter += 1
-            pn, task = split_pntask(k)
-            if pn not in pvchanged:
-                # Format the output, we only print pn (no task) since
-                # all the tasks would be changed when pn or pr changed,
-                # the dict format is:
-                # {pn: pv/pr_old -> pv/pr_new}
-                pvchanged[pn] = "%s -> %s" % (d_old.get(k).get(vr), d_new.get(k).get(vr))
-            del(d_new[k])
-
-    if not pvchanged:
-        return 0
-
-    print("\n=== %s changed: (%s tasks)" % (vr.upper(), counter))
-    for k in pvchanged.keys():
-        print("  %s: %s" % (k, pvchanged.get(k)))
-
-    return counter
-
-def print_depchanged(d_new = None, d_old = None, verbose = False):
-    """
-    Print the dependency changes
-    """
-    depchanged = {}
-    counter = 0
-    for k in d_new.keys():
-        counter += 1
-        pn, task = split_pntask(k)
-        if (verbose):
-            full_path_old = d_old.get(k).get("path")
-            full_path_new = d_new.get(k).get("path")
-            # No counter since it is not ready here
-            if sigdata_re.match(full_path_old) and sigdata_re.match(full_path_new):
-                output = bb.siggen.compare_sigfiles(full_path_old, full_path_new)
-                if output:
-                    print("\n=== The verbose changes of %s.%s:" % (pn, task))
-                    print('\n'.join(output))
-        else:
-            # Format the output, the format is:
-            # {pn: task1, task2, ...}
-            if pn in depchanged:
-                depchanged[pn] = "%s %s" % (depchanged.get(pn), task)
-            else:
-                depchanged[pn] = task
-
-    if len(depchanged) > 0:
-        print("\n=== Dependencies changed: (%s tasks)" % counter)
-        for k in depchanged.keys():
-            print("  %s: %s" % (k, depchanged[k]))
-
-    return counter
-
-
-def main():
-    """
-    Print what will be done between the current and last builds:
-    1) Run "STAMPS_DIR=<path> bitbake -S recipe" to re-generate the stamps
-    2) Figure out what are newly added and changed, can't figure out
-       what are removed since we can't know the previous stamps
-       clearly, for example, if there are several builds, we can't know
-       which stamps the last build has used exactly.
-    3) Use bb.siggen.compare_sigfiles to diff the old and new stamps
-    """
-
-    parser = argparse_oe.ArgumentParser(usage = """%(prog)s [options] [package ...]
-print what will be done between the current and last builds, for example:
-
-    $ bitbake core-image-sato
-    # Edit the recipes
-    $ bitbake-whatchanged core-image-sato
-
-The changes will be printed"
-
-Note:
-    The amount of tasks is not accurate when the task is "do_build" since
-    it usually depends on other tasks.
-    The "nostamp" task is not included.
-"""
-)
-    parser.add_argument("recipe", help="recipe to check")
-    parser.add_argument("-v", "--verbose", help = "print the verbose changes", action = "store_true")
-    args = parser.parse_args()
-
-    # Get the STAMPS_DIR
-    print("Figuring out the STAMPS_DIR ...")
-    cmdline = "bitbake -e | sed -ne 's/^STAMPS_DIR=\"\(.*\)\"/\\1/p'"
-    try:
-        stampsdir, err = bb.process.run(cmdline)
-    except:
-        raise
-    if not stampsdir:
-        print("ERROR: No STAMPS_DIR found for '%s'" % args.recipe, file=sys.stderr)
-        return 2
-    stampsdir = stampsdir.rstrip("\n")
-    if not os.path.isdir(stampsdir):
-        print("ERROR: stamps directory \"%s\" not found!" % stampsdir, file=sys.stderr)
-        return 2
-
-    # The new stamps dir
-    new_stampsdir = stampsdir + ".bbs"
-    if os.path.exists(new_stampsdir):
-        print("ERROR: %s already exists!" % new_stampsdir, file=sys.stderr)
-        return 2
-
-    try:
-        # Generate the new stamps dir
-        print("Generating the new stamps ... (need several minutes)")
-        cmdline = "STAMPS_DIR=%s bitbake -S none %s" % (new_stampsdir, args.recipe)
-        # FIXME
-        # The "bitbake -S" may fail, not fatal error, the stamps will still
-        # be generated, this might be a bug of "bitbake -S".
-        try:
-            bb.process.run(cmdline)
-        except Exception as exc:
-            print(exc)
-
-        # The dict for the new and old stamps.
-        old_dict = gen_dict(stampsdir)
-        new_dict = gen_dict(new_stampsdir)
-
-        # Remove the same one from both stamps.
-        cnt_unchanged = 0
-        for k in list(new_dict.keys()):
-            if k in old_dict:
-                cnt_unchanged += 1
-                del(new_dict[k])
-                del(old_dict[k])
-
-        # Re-construct the dict to easily find out what is added or changed.
-        # The dict format is:
-        # {pn_task: {pv: PV, pr: PR, path: PATH}}
-        new_recon = recon_dict(new_dict)
-        old_recon = recon_dict(old_dict)
-
-        del new_dict
-        del old_dict
-
-        # Figure out what are changed, the new_recon would be changed
-        # by the print_xxx function.
-        # Newly added
-        cnt_added = print_added(new_recon, old_recon)
-
-        # PV (including PE) and PR changed
-        # Let the bb.siggen handle them if verbose
-        cnt_rv = {}
-        if not args.verbose:
-            for i in ('pv', 'pr'):
-                cnt_rv[i] = print_vrchanged(new_recon, old_recon, i)
-
-        # Dependencies changed (use bitbake-diffsigs)
-        cnt_dep = print_depchanged(new_recon, old_recon, args.verbose)
-
-        total_changed = cnt_added + (cnt_rv.get('pv') or 0) + (cnt_rv.get('pr') or 0) + cnt_dep
-
-        print("\n=== Summary: (%s changed, %s unchanged)" % (total_changed, cnt_unchanged))
-        if args.verbose:
-            print("Newly added: %s\nDependencies changed: %s\n" % \
-                (cnt_added, cnt_dep))
-        else:
-            print("Newly added: %s\nPV changed: %s\nPR changed: %s\nDependencies changed: %s\n" % \
-                (cnt_added, cnt_rv.get('pv') or 0, cnt_rv.get('pr') or 0, cnt_dep))
-    except:
-        print("ERROR occurred!")
-        raise
-    finally:
-        # Remove the newly generated stamps dir
-        if os.path.exists(new_stampsdir):
-            print("Removing the newly generated stamps dir ...")
-            shutil.rmtree(new_stampsdir)
-
-if __name__ == "__main__":
-    sys.exit(main())
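
No direct replacement for bitbake-whatchanged is added in this series; for similar "what would rebuild and why" queries, bitbake's built-in signature comparison can be used (an assumption based on standard bitbake functionality, not stated in this diff):

    # Compare current task signatures against the most recent matching ones
    $ bitbake -S printdiff core-image-sato
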
diff --git a/scripts/buildhistory-collect-srcrevs b/scripts/buildhistory-collect-srcrevs
index 340bee78bb..c937e49c2a 100755
--- a/scripts/buildhistory-collect-srcrevs
+++ b/scripts/buildhistory-collect-srcrevs
@@ -53,7 +53,7 @@ def main():
         sys.exit(1)
 
     if options.forcevariable:
-        forcevariable = '_forcevariable'
+        forcevariable = ':forcevariable'
     else:
         forcevariable = ''
 
@@ -99,9 +99,9 @@ def main():
         print('# %s' % curdir)
         for pn, name, srcrev in srcrevs:
             if name:
-                print('SRCREV_%s_pn-%s%s = "%s"' % (name, pn, forcevariable, srcrev))
+                print('SRCREV_%s:pn-%s%s = "%s"' % (name, pn, forcevariable, srcrev))
             else:
-                print('SRCREV_pn-%s%s = "%s"' % (pn, forcevariable, srcrev))
+                print('SRCREV:pn-%s%s = "%s"' % (pn, forcevariable, srcrev))
 
 
 if __name__ == "__main__":
diff --git a/scripts/buildhistory-diff b/scripts/buildhistory-diff
index 3bd40a2a1e..a6e785aa23 100755
--- a/scripts/buildhistory-diff
+++ b/scripts/buildhistory-diff
@@ -11,7 +11,6 @@
 import sys
 import os
 import argparse
-from distutils.version import LooseVersion
 
 # Ensure PythonGit is installed (buildhistory_analysis needs it)
 try:
@@ -73,10 +72,6 @@ def main():
     parser = get_args_parser()
     args = parser.parse_args()
 
-    if LooseVersion(git.__version__) < '0.3.1':
-        sys.stderr.write("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script\n")
-        sys.exit(1)
-
     if len(args.revisions) > 2:
         sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:]))
         parser.print_help()
diff --git a/scripts/buildstats-diff b/scripts/buildstats-diff
index 2f6498ab67..c9aa76a8fa 100755
--- a/scripts/buildstats-diff
+++ b/scripts/buildstats-diff
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
 #
 # Script for comparing buildstats from two different builds
 #
diff --git a/scripts/buildstats-summary b/scripts/buildstats-summary
new file mode 100755
index 0000000000..b10c671b29
--- /dev/null
+++ b/scripts/buildstats-summary
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+#
+# Dump a summary of the specified buildstats to the terminal, filtering and
+# sorting by walltime.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import argparse
+import dataclasses
+import datetime
+import enum
+import os
+import pathlib
+import sys
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+sys.path.append(os.path.join(scripts_path, "lib"))
+import buildstats
+
+
+@dataclasses.dataclass
+class Task:
+    recipe: str
+    task: str
+    start: datetime.datetime
+    duration: datetime.timedelta
+
+
+class Sorting(enum.Enum):
+    start = 1
+    duration = 2
+
+    # argparse integration
+    def __str__(self) -> str:
+        return self.name
+
+    def __repr__(self) -> str:
+        return self.name
+
+    @staticmethod
+    def from_string(s: str):
+        try:
+            return Sorting[s]
+        except KeyError:
+            return s
+
+
+def read_buildstats(path: pathlib.Path) -> buildstats.BuildStats:
+    if not path.exists():
+        raise Exception(f"No such file or directory: {path}")
+    if path.is_file():
+        return buildstats.BuildStats.from_file_json(path)
+    if (path / "build_stats").is_file():
+        return buildstats.BuildStats.from_dir(path)
+    raise Exception(f"Cannot find buildstats in {path}")
+
+
+def dump_buildstats(args, bs: buildstats.BuildStats):
+    tasks = []
+    for recipe in bs.values():
+        for task, stats in recipe.tasks.items():
+            t = Task(
+                recipe.name,
+                task,
+                datetime.datetime.fromtimestamp(stats["start_time"]),
+                datetime.timedelta(seconds=int(stats.walltime)),
+            )
+            tasks.append(t)
+
+    tasks.sort(key=lambda t: getattr(t, args.sort.name))
+
+    minimum = datetime.timedelta(seconds=args.shortest)
+    highlight = datetime.timedelta(seconds=args.highlight)
+
+    for t in tasks:
+        if t.duration >= minimum:
+            line = f"{t.duration} {t.recipe}:{t.task}"
+            if args.highlight and t.duration >= highlight:
+                print(f"\033[1m{line}\033[0m")
+            else:
+                print(line)
+
+
+def main(argv=None) -> int:
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter
+    )
+
+    parser.add_argument(
+        "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path
+    )
+    parser.add_argument(
+        "--sort",
+        "-s",
+        type=Sorting.from_string,
+        choices=list(Sorting),
+        default=Sorting.start,
+        help="Sort tasks",
+    )
+    parser.add_argument(
+        "--shortest",
+        "-t",
+        type=int,
+        default=1,
+        metavar="SECS",
+        help="Hide tasks shorter than SECS seconds",
+    )
+    parser.add_argument(
+        "--highlight",
+        "-g",
+        type=int,
+        default=60,
+        metavar="SECS",
+        help="Highlight tasks longer than SECS seconds (0 disabled)",
+    )
+
+    args = parser.parse_args(argv)
+
+    bs = read_buildstats(args.buildstats)
+    dump_buildstats(args, bs)
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
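
A minimal usage sketch of the new buildstats-summary script (the buildstats path is illustrative; real directories live under tmp/buildstats/ and are named by build timestamp):

    # Show tasks sorted by duration, hiding anything that took under 10 seconds
    $ buildstats-summary --sort duration --shortest 10 tmp/buildstats/20230601123456
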
diff --git a/scripts/combo-layer b/scripts/combo-layer
index a634dd69d2..4a715914af 100755
--- a/scripts/combo-layer
+++ b/scripts/combo-layer
@@ -19,9 +19,8 @@ import tempfile
 import configparser
 import re
 import copy
-import pipes
+import shlex
 import shutil
-from collections import OrderedDict
 from string import Template
 from functools import reduce
 
@@ -192,6 +191,23 @@ def runcmd(cmd,destdir=None,printerr=True,out=None,env=None):
     logger.debug("output: %s" % output.replace(chr(0), '\\0'))
     return output
 
+def action_sync_revs(conf, args):
+    """
+    Update the last_revision config option for each repo with the latest
+    revision in the remote's branch. Useful if multiple people are using
+    combo-layer.
+    """
+    repos = get_repos(conf, args[1:])
+
+    for name in repos:
+        repo = conf.repos[name]
+        ldir = repo['local_repo_dir']
+        branch = repo.get('branch', "master")
+        runcmd("git fetch", ldir)
+        lastrev = runcmd('git rev-parse origin/%s' % branch, ldir).strip()
+        print("Updating %s to %s" % (name, lastrev))
+        conf.update(name, "last_revision", lastrev)
+
 def action_init(conf, args):
     """
     Clone component repositories
@@ -467,7 +483,7 @@ def check_repo_clean(repodir):
     exit if repo is dirty
     """
     output=runcmd("git status --porcelain", repodir)
-    r = re.compile('\?\? patch-.*/')
+    r = re.compile(r'\?\? patch-.*/')
     dirtyout = [item for item in output.splitlines() if not r.match(item)]
     if dirtyout:
         logger.error("git repo %s is dirty, please fix it first", repodir)
@@ -508,7 +524,7 @@ def check_patch(patchfile):
     f.close()
     if of:
         of.close()
-        os.rename(patchfile + '.tmp', patchfile)
+        os.rename(of.name, patchfile)
 
 def drop_to_shell(workdir=None):
     if not sys.stdin.isatty():
@@ -1259,7 +1275,7 @@ def apply_commit(parent, rev, largs, wargs, dest_dir, file_filter=None):
     target = os.path.join(wargs["destdir"], dest_dir)
     if not os.path.isdir(target):
         os.makedirs(target)
-    quoted_target = pipes.quote(target)
+    quoted_target = shlex.quote(target)
     # os.sysconf('SC_ARG_MAX') is lying: running a command with
     # string length 629343 already failed with "Argument list too
     # long" although SC_ARG_MAX = 2097152. "man execve" explains
@@ -1271,7 +1287,7 @@ def apply_commit(parent, rev, largs, wargs, dest_dir, file_filter=None):
     unquoted_args = []
     cmdsize = 100 + len(quoted_target)
     while update:
-        quoted_next = pipes.quote(update[0])
+        quoted_next = shlex.quote(update[0])
         size_next = len(quoted_next) + len(dest_dir) + 1
         logger.debug('cmdline length %d + %d < %d?' % (cmdsize, size_next, os.sysconf('SC_ARG_MAX')))
         if cmdsize + size_next < max_cmdsize:
@@ -1302,6 +1318,7 @@ actions = {
1302 "update": action_update, 1318 "update": action_update,
1303 "pull": action_pull, 1319 "pull": action_pull,
1304 "splitpatch": action_splitpatch, 1320 "splitpatch": action_splitpatch,
1321 "sync-revs": action_sync_revs,
1305} 1322}
1306 1323
1307def main(): 1324def main():
@@ -1312,10 +1329,11 @@ def main():
 Create and update a combination layer repository from multiple component repositories.
 
 Action:
   init                  initialise the combo layer repo
   update [components]   get patches from component repos and apply them to the combo repo
   pull [components]     just pull component repos only
-  splitpatch [commit]   generate commit patch and split per component, default commit is HEAD""")
+  sync-revs [components]  update the config file's last_revision for each repository
+  splitpatch [commit]   generate commit patch and split per component, default commit is HEAD""")
 
     parser.add_option("-c", "--conf", help = "specify the config file (conf/combo-layer.conf is the default).",
                       action = "store", dest = "conffile", default = "conf/combo-layer.conf")
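
A quick sketch of the new sync-revs action (component names are illustrative):

    # Record each component's current origin/<branch> tip as last_revision
    # in conf/combo-layer.conf
    $ combo-layer sync-revs
    # Or limit it to specific components
    $ combo-layer sync-revs bitbake openembedded-core
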
diff --git a/scripts/combo-layer-hook-default.sh b/scripts/combo-layer-hook-default.sh
index 11547a9826..fb9651b31f 100755
--- a/scripts/combo-layer-hook-default.sh
+++ b/scripts/combo-layer-hook-default.sh
@@ -1,5 +1,7 @@
 #!/bin/sh
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # Hook to add source component/revision info to commit message
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
index e7bd129e9e..6672189c95 100755
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh
@@ -16,8 +16,8 @@
 
 # Setup the defaults
 DATFILE="bb-matrix.dat"
-XLABEL="BB_NUMBER_THREADS"
-YLABEL="PARALLEL_MAKE"
+XLABEL="BB\\\\_NUMBER\\\\_THREADS"
+YLABEL="PARALLEL\\\\_MAKE"
 FIELD=3
 DEF_TITLE="Elapsed Time (seconds)"
 PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
index 090133600b..a9cdf082ab 100755
--- a/scripts/contrib/bbvars.py
+++ b/scripts/contrib/bbvars.py
@@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars):
36def collect_documented_vars(docfiles): 36def collect_documented_vars(docfiles):
37 ''' Walk the docfiles and collect the documented variables ''' 37 ''' Walk the docfiles and collect the documented variables '''
38 documented_vars = [] 38 documented_vars = []
39 prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-") 39 prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-")
40 var_prog = re.compile('<glossentry id=\'var-(.*)\'>') 40 var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
41 for d in docfiles: 41 for d in docfiles:
42 with open(d) as f: 42 with open(d) as f:
43 documented_vars += var_prog.findall(f.read()) 43 documented_vars += var_prog.findall(f.read())
@@ -45,7 +45,7 @@ def collect_documented_vars(docfiles):
 
     return documented_vars
 
 def bbvar_doctag(var, docconf):
-    prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var))
+    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
     if docconf == "":
         return "?"
 
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
index fa71d4a2e9..0a85e6e708 100755
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then
87 exit 1 87 exit 1
88fi 88fi
89 89
90if [ -n "$email_to" ]; then
91 if ! [ -x "$(command -v phantomjs)" ]; then
92 echo "ERROR: Sending email needs phantomjs."
93 exit 1
94 fi
95 if ! [ -x "$(command -v optipng)" ]; then
96 echo "ERROR: Sending email needs optipng."
97 exit 1
98 fi
99fi
100
101# Open a file descriptor for flock and acquire lock 90# Open a file descriptor for flock and acquire lock
102LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" 91LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
103if ! exec 3> "$LOCK_FILE"; then 92if ! exec 3> "$LOCK_FILE"; then
104 echo "ERROR: Unable to open lock file" 93 echo "ERROR: Unable to open loemack file"
105 exit 1 94 exit 1
106fi 95fi
107if ! flock -n 3; then 96if ! flock -n 3; then
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then
     if [ -n "$email_to" ]; then
         echo "Emailing test report"
         os_name=`get_os_release_var PRETTY_NAME`
-        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
     fi
 
     # Upload report files, unless we're on detached head
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
new file mode 100755
index 0000000000..c69acb4095
--- /dev/null
+++ b/scripts/contrib/convert-overrides.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python3
+#
+# Conversion script to add new override syntax to existing bitbake metadata
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# To use this script on a new layer you need to list the overrides the
+# layer is known to use in the list below.
+#
+# Known constraint: Matching is 'loose' and in particular will find variable
+# and function names with "_append" and "_remove" in them. Those need to be
+# filtered out manually or in the skip list below.
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+import argparse
+
+parser = argparse.ArgumentParser(description="Convert override syntax")
+parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
+parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
+parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
+parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
+parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
+parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
+parser.add_argument("path", nargs="+", help="Paths to convert")
+
+args = parser.parse_args()
+
+# List of strings to treat as overrides
+vars = args.override
+vars += ["append", "prepend", "remove"]
+vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
+vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
+vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
+vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
+vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
+vars += ["tune-", "pn-", "forcevariable"]
+vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
+vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
+vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
+vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
+vars += ["linux-gnueabi", "eabi"]
+vars += ["virtclass-multilib", "virtclass-mcextend"]
+
+# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
+# Handles issues with arc matching arch.
+shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override
+
+# Variables which take packagenames as an override
+packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
+               "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
+               "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
+               "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
+               "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars
+
+# Expressions to skip if encountered, these are not overrides
+skips = args.skip
+skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
+skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
+skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
+skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
+skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
+skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
+skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
+skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
+skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
+
+imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
+packagevars += imagevars
+
+skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext
+
+vars_re = {}
+for exp in vars:
+    vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
+
+shortvars_re = {}
+for exp in shortvars:
+    shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")
+
+package_re = {}
+for exp in packagevars:
+    package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
+
+# Other substitutions to make
+subs = {
+    'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
+    "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
+    "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
+    "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
+    'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
+}
+
+def processfile(fn):
+    print("processing file '%s'" % fn)
+    try:
+        fh, abs_path = tempfile.mkstemp()
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                for line in old_file:
+                    skip = False
+                    for s in skips:
+                        if s in line:
+                            skip = True
+                    if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
+                        skip = False
+                    for sub in subs:
+                        if sub in line:
+                            line = line.replace(sub, subs[sub])
+                            skip = True
+                    if not skip:
+                        for pvar in packagevars:
+                            line = package_re[pvar][0].sub(package_re[pvar][1], line)
+                        for var in vars:
+                            line = vars_re[var][0].sub(vars_re[var][1], line)
+                        for shortvar in shortvars:
+                            line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
+                        if "pkg_postinst:ontarget" in line:
+                            line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
+                    new_file.write(line)
+        shutil.copymode(fn, abs_path)
+        os.remove(fn)
+        shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.9.3"
+
+for p in args.path:
+    if os.path.isfile(p):
+        processfile(p)
+    else:
+        print("processing directory '%s'" % p)
+        for root, dirs, files in os.walk(p):
+            for name in files:
+                if name == ourname:
+                    continue
+                fn = os.path.join(root, name)
+                if os.path.islink(fn):
+                    continue
+                if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
+                    continue
+                processfile(fn)
+
+print("All files processed with version %s" % ourversion)
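
For illustration, the kind of rewrite convert-overrides.py performs on metadata (the recipe lines shown are hypothetical):

    $ scripts/contrib/convert-overrides.py meta-mylayer
    # Before:  SRC_URI_append_qemux86 = " file://tweak.patch"
    #          RDEPENDS_${PN}_class-native = "foo-native"
    # After:   SRC_URI:append:qemux86 = " file://tweak.patch"
    #          RDEPENDS:${PN}:class-native = "foo-native"
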
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
new file mode 100755
index 0000000000..4e194dee3f
--- /dev/null
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python3
+#
+# Conversion script to change LICENSE entries to SPDX identifiers
+#
+# Copyright (C) 2021-2022 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+    print("Please specify a directory to run the conversion script against.")
+    sys.exit(1)
+
+license_map = {
+"AGPL-3" : "AGPL-3.0-only",
+"AGPL-3+" : "AGPL-3.0-or-later",
+"AGPLv3" : "AGPL-3.0-only",
+"AGPLv3+" : "AGPL-3.0-or-later",
+"AGPLv3.0" : "AGPL-3.0-only",
+"AGPLv3.0+" : "AGPL-3.0-or-later",
+"AGPL-3.0" : "AGPL-3.0-only",
+"AGPL-3.0+" : "AGPL-3.0-or-later",
+"BSD-0-Clause" : "0BSD",
+"GPL-1" : "GPL-1.0-only",
+"GPL-1+" : "GPL-1.0-or-later",
+"GPLv1" : "GPL-1.0-only",
+"GPLv1+" : "GPL-1.0-or-later",
+"GPLv1.0" : "GPL-1.0-only",
+"GPLv1.0+" : "GPL-1.0-or-later",
+"GPL-1.0" : "GPL-1.0-only",
+"GPL-1.0+" : "GPL-1.0-or-later",
+"GPL-2" : "GPL-2.0-only",
+"GPL-2+" : "GPL-2.0-or-later",
+"GPLv2" : "GPL-2.0-only",
+"GPLv2+" : "GPL-2.0-or-later",
+"GPLv2.0" : "GPL-2.0-only",
+"GPLv2.0+" : "GPL-2.0-or-later",
+"GPL-2.0" : "GPL-2.0-only",
+"GPL-2.0+" : "GPL-2.0-or-later",
+"GPL-3" : "GPL-3.0-only",
+"GPL-3+" : "GPL-3.0-or-later",
+"GPLv3" : "GPL-3.0-only",
+"GPLv3+" : "GPL-3.0-or-later",
+"GPLv3.0" : "GPL-3.0-only",
+"GPLv3.0+" : "GPL-3.0-or-later",
+"GPL-3.0" : "GPL-3.0-only",
+"GPL-3.0+" : "GPL-3.0-or-later",
+"LGPLv2" : "LGPL-2.0-only",
+"LGPLv2+" : "LGPL-2.0-or-later",
+"LGPLv2.0" : "LGPL-2.0-only",
+"LGPLv2.0+" : "LGPL-2.0-or-later",
+"LGPL-2.0" : "LGPL-2.0-only",
+"LGPL-2.0+" : "LGPL-2.0-or-later",
+"LGPL2.1" : "LGPL-2.1-only",
+"LGPL2.1+" : "LGPL-2.1-or-later",
+"LGPLv2.1" : "LGPL-2.1-only",
+"LGPLv2.1+" : "LGPL-2.1-or-later",
+"LGPL-2.1" : "LGPL-2.1-only",
+"LGPL-2.1+" : "LGPL-2.1-or-later",
+"LGPLv3" : "LGPL-3.0-only",
+"LGPLv3+" : "LGPL-3.0-or-later",
+"LGPL-3.0" : "LGPL-3.0-only",
+"LGPL-3.0+" : "LGPL-3.0-or-later",
+"MPL-1" : "MPL-1.0",
+"MPLv1" : "MPL-1.0",
+"MPLv1.1" : "MPL-1.1",
+"MPLv2" : "MPL-2.0",
+"MIT-X" : "MIT",
+"MIT-style" : "MIT",
+"openssl" : "OpenSSL",
+"PSF" : "PSF-2.0",
+"PSFv2" : "PSF-2.0",
+"Python-2" : "Python-2.0",
+"Apachev2" : "Apache-2.0",
+"Apache-2" : "Apache-2.0",
+"Artisticv1" : "Artistic-1.0",
+"Artistic-1" : "Artistic-1.0",
+"AFL-2" : "AFL-2.0",
+"AFL-1" : "AFL-1.2",
+"AFLv2" : "AFL-2.0",
+"AFLv1" : "AFL-1.2",
+"CDDLv1" : "CDDL-1.0",
+"CDDL-1" : "CDDL-1.0",
+"EPLv1.0" : "EPL-1.0",
+"FreeType" : "FTL",
+"Nauman" : "Naumen",
+"tcl" : "TCL",
+"vim" : "Vim",
+"SGIv1" : "SGI-1",
+}
+
+def processfile(fn):
+    print("processing file '%s'" % fn)
+    try:
+        fh, abs_path = tempfile.mkstemp()
+        modified = False
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                for line in old_file:
+                    if not line.startswith("LICENSE"):
+                        new_file.write(line)
+                        continue
+                    orig = line
+                    for license in sorted(license_map, key=len, reverse=True):
+                        for ending in ['"', "'", " ", ")"]:
+                            line = line.replace(license + ending, license_map[license] + ending)
+                    if orig != line:
+                        modified = True
+                    new_file.write(line)
+        new_file.close()
+        if modified:
+            shutil.copymode(fn, abs_path)
+            os.remove(fn)
+            shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.01"
+
+if os.path.isfile(sys.argv[1]):
+    processfile(sys.argv[1])
+    sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+    print("processing directory '%s'" % targetdir)
+    for root, dirs, files in os.walk(targetdir):
+        for name in files:
+            if name == ourname:
+                continue
+            fn = os.path.join(root, name)
+            if os.path.islink(fn):
+                continue
+            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+                continue
+            processfile(fn)
+
+print("All files processed with version %s" % ourversion)
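
For illustration, the LICENSE rewrite the script performs, using entries straight from the map above (the recipe line is hypothetical):

    $ scripts/contrib/convert-spdx-licenses.py meta-mylayer
    # Before:  LICENSE = "GPLv2 & LGPLv2.1+"
    # After:   LICENSE = "GPL-2.0-only & LGPL-2.1-or-later"
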
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py
new file mode 100755
index 0000000000..587392334f
--- /dev/null
+++ b/scripts/contrib/convert-srcuri.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Conversion script to update SRC_URI to add branch to git urls
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+    print("Please specify a directory to run the conversion script against.")
+    sys.exit(1)
+
+def processfile(fn):
+    def matchline(line):
+        if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
+            return False
+        return True
+    print("processing file '%s'" % fn)
+    try:
+        if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
+            return
+        fh, abs_path = tempfile.mkstemp()
+        modified = False
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                for line in old_file:
+                    if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
+                        if line.endswith('"\n'):
+                            line = line.replace('"\n', ';branch=master"\n')
+                        elif re.search('\s*\\\\$', line):
+                            line = re.sub('\s*\\\\$', ';branch=master \\\\', line)
+                        modified = True
+                    if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
+                        if "protocol=git" in line:
+                            line = line.replace('protocol=git', 'protocol=https')
+                        elif line.endswith('"\n'):
+                            line = line.replace('"\n', ';protocol=https"\n')
+                        elif re.search('\s*\\\\$', line):
+                            line = re.sub('\s*\\\\$', ';protocol=https \\\\', line)
+                        modified = True
+                    new_file.write(line)
+        if modified:
+            shutil.copymode(fn, abs_path)
+            os.remove(fn)
+            shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+    processfile(sys.argv[1])
+    sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+    print("processing directory '%s'" % targetdir)
+    for root, dirs, files in os.walk(targetdir):
+        for name in files:
+            if name == ourname:
+                continue
+            fn = os.path.join(root, name)
+            if os.path.islink(fn):
+                continue
+            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+                continue
+            processfile(fn)
+
+print("All files processed with version %s" % ourversion)
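
For illustration, the SRC_URI rewrites the script applies, combining the branch and protocol fixups above (the recipe line is hypothetical):

    $ scripts/contrib/convert-srcuri.py meta-mylayer
    # Before:  SRC_URI = "git://github.com/example/project;protocol=git"
    # After:   SRC_URI = "git://github.com/example/project;protocol=https;branch=master"
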
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
new file mode 100755
index 0000000000..eded90ca61
--- /dev/null
+++ b/scripts/contrib/convert-variable-renames.py
@@ -0,0 +1,116 @@
1#!/usr/bin/env python3
2#
3# Conversion script to rename variables to versions with improved terminology.
4# Also highlights potentially problematic language and removed variables.
5#
6# Copyright (C) 2021 Richard Purdie
7# Copyright (C) 2022 Wind River Systems, Inc.
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import re
13import os
14import sys
15import tempfile
16import shutil
17import mimetypes
18
19if len(sys.argv) < 2:
20 print("Please specify a directory to run the conversion script against.")
21 sys.exit(1)
22
23renames = {
24"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
25"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
26"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
27"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
28"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
29"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
30"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
31"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
32"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
33"PNBLACKLIST" : "SKIP_RECIPE",
34"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
35"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
36"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
37"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
38"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
39"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
40"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
41"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
42"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
43"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
44"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
45"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
46}
47
48removed_list = [
49"BB_STAMP_WHITELIST",
50"BB_STAMP_POLICY",
51"INHERIT_BLACKLIST",
52"TUNEABI_WHITELIST",
53]
54
55context_check_list = [
56"blacklist",
57"whitelist",
58"abort",
59]
60
61def processfile(fn):
62
63 print("processing file '%s'" % fn)
64 try:
65 fh, abs_path = tempfile.mkstemp()
66 modified = False
67 with os.fdopen(fh, 'w') as new_file:
68 with open(fn, "r") as old_file:
69 lineno = 0
70 for line in old_file:
71 lineno += 1
72 if not line or "BB_RENAMED_VARIABLE" in line:
73 new_file.write(line); continue # pass the line through unchanged
74 # Do the renames
75 for old_name, new_name in renames.items():
76 if old_name in line:
77 line = line.replace(old_name, new_name)
78 modified = True
79 # Find removed names
80 for removed_name in removed_list:
81 if removed_name in line:
82 print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
83 for check_word in context_check_list:
84 if re.search(check_word, line, re.IGNORECASE):
85 print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
86 new_file.write(line)
87 new_file.close()
88 if modified:
89 print("*** Modified file '%s'" % (fn))
90 shutil.copymode(fn, abs_path)
91 os.remove(fn)
92 shutil.move(abs_path, fn)
93 except UnicodeDecodeError:
94 pass
95
96ourname = os.path.basename(sys.argv[0])
97ourversion = "0.1"
98
99if os.path.isfile(sys.argv[1]):
100 processfile(sys.argv[1])
101 sys.exit(0)
102
103for targetdir in sys.argv[1:]:
104 print("processing directory '%s'" % targetdir)
105 for root, dirs, files in os.walk(targetdir):
106 for name in files:
107 if name == ourname:
108 continue
109 fn = os.path.join(root, name)
110 if os.path.islink(fn):
111 continue
112 if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
113 continue
114 processfile(fn)
115
116print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
index 7f2ad112a6..70eee8ebea 100755
--- a/scripts/contrib/ddimage
+++ b/scripts/contrib/ddimage
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
index ad6070c369..82c84baa1d 100755
--- a/scripts/contrib/dialog-power-control
+++ b/scripts/contrib/dialog-power-control
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Simple script to show a manual power prompt for when you want to use 7# Simple script to show a manual power prompt for when you want to use
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index 1191f57a8e..7197a2fcea 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Perform an audit of which packages provide documentation and which 7# Perform an audit of which packages provide documentation and which
@@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then
26fi 28fi
27 29
28echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" 30echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
29echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " 31echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
30echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\"" 32echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
31 33
32for pkg in `bitbake -s | awk '{ print \$1 }'`; do 34for pkg in `bitbake -s | awk '{ print \$1 }'`; do
33 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || 35 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
new file mode 100755
index 0000000000..4d65a99258
--- /dev/null
+++ b/scripts/contrib/image-manifest
@@ -0,0 +1,523 @@
1#!/usr/bin/env python3
2
3# Script to extract information from image manifests
4#
5# Copyright (C) 2018 Intel Corporation
6# Copyright (C) 2021 Wind River Systems, Inc.
7#
8# SPDX-License-Identifier: GPL-2.0-only
9#
10
11import sys
12import os
13import argparse
14import logging
15import json
16import shutil
17import tempfile
18import tarfile
19from collections import OrderedDict
20
21scripts_path = os.path.dirname(__file__)
22lib_path = scripts_path + '/../lib'
23sys.path = sys.path + [lib_path]
24
25import scriptutils
26logger = scriptutils.logger_create(os.path.basename(__file__))
27
28import argparse_oe
29import scriptpath
30bitbakepath = scriptpath.add_bitbake_lib_path()
31if not bitbakepath:
32 logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
33 sys.exit(1)
34logger.debug('Using standard bitbake path %s' % bitbakepath)
35scriptpath.add_oe_lib_path()
36
37import bb.tinfoil
38import bb.utils
39import oe.utils
40import oe.recipeutils
41
42def get_pkg_list(manifest):
43 pkglist = []
44 with open(manifest, 'r') as f:
45 for line in f:
46 linesplit = line.split()
47 if len(linesplit) == 3:
48 # manifest file
49 pkglist.append(linesplit[0])
50 elif len(linesplit) == 1:
51 # build dependency file
52 pkglist.append(linesplit[0])
53 return sorted(pkglist)
54
55def list_packages(args):
56 pkglist = get_pkg_list(args.manifest)
57 for pkg in pkglist:
58 print('%s' % pkg)
59
60def pkg2recipe(tinfoil, pkg):
61 if "-native" in pkg:
62 logger.info('skipping %s' % pkg)
63 return None
64
65 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
66 pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
67 logger.debug('pkgdatafile %s' % pkgdatafile)
68 try:
69 f = open(pkgdatafile, 'r')
70 for line in f:
71 if line.startswith('PN:'):
72 recipe = line.split(':', 1)[1].strip()
73 return recipe
74 except Exception:
75 logger.warning('%s is missing' % pkgdatafile)
76 return None
77
78def get_recipe_list(manifest, tinfoil):
79 pkglist = get_pkg_list(manifest)
80 recipelist = []
81 for pkg in pkglist:
82 recipe = pkg2recipe(tinfoil,pkg)
83 if recipe:
84 if recipe not in recipelist:
85 recipelist.append(recipe)
86
87 return sorted(recipelist)
88
89def list_recipes(args):
90 import bb.tinfoil
91 with bb.tinfoil.Tinfoil() as tinfoil:
92 tinfoil.logger.setLevel(logger.getEffectiveLevel())
93 tinfoil.prepare(config_only=True)
94 recipelist = get_recipe_list(args.manifest, tinfoil)
95 for recipe in sorted(recipelist):
96 print('%s' % recipe)
97
98def list_layers(args):
99
100 def find_git_repo(pth):
101 checkpth = pth
102 while checkpth != os.sep:
103 if os.path.exists(os.path.join(checkpth, '.git')):
104 return checkpth
105 checkpth = os.path.dirname(checkpth)
106 return None
107
108 def get_git_remote_branch(repodir):
109 try:
110 stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
111 except bb.process.ExecutionError as e:
112 stdout = None
113 if stdout:
114 return stdout.strip()
115 else:
116 return None
117
118 def get_git_head_commit(repodir):
119 try:
120 stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
121 except bb.process.ExecutionError as e:
122 stdout = None
123 if stdout:
124 return stdout.strip()
125 else:
126 return None
127
128 def get_git_repo_url(repodir, remote='origin'):
129 import bb.process
130 # Try to get upstream repo location from origin remote
131 try:
132 stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
133 except bb.process.ExecutionError as e:
134 stdout = None
135 if stdout:
136 for line in stdout.splitlines():
137 splitline = line.split()
138 if len(splitline) > 1:
139 if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
140 return splitline[1]
141 return None
142
143 with bb.tinfoil.Tinfoil() as tinfoil:
144 tinfoil.logger.setLevel(logger.getEffectiveLevel())
145 tinfoil.prepare(config_only=False)
146 layers = OrderedDict()
147 for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
148 layerdata = OrderedDict()
149 layername = os.path.basename(layerdir)
150 logger.debug('layername %s, layerdir %s' % (layername, layerdir))
151 if layername in layers:
152 logger.warning('layername %s is not unique in configuration' % layername)
153 layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
154 logger.debug('trying layername %s' % layername)
155 if layername in layers:
156 logger.error('Layer name %s is not unique in configuration' % layername)
157 sys.exit(2)
158 repodir = find_git_repo(layerdir)
159 if repodir:
160 remotebranch = get_git_remote_branch(repodir)
161 remote = 'origin'
162 if remotebranch and '/' in remotebranch:
163 rbsplit = remotebranch.split('/', 1)
164 layerdata['actual_branch'] = rbsplit[1]
165 remote = rbsplit[0]
166 layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
167 if os.path.abspath(repodir) != os.path.abspath(layerdir):
168 layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
169 commit = get_git_head_commit(repodir)
170 if commit:
171 layerdata['vcs_commit'] = commit
172 layers[layername] = layerdata
173
174 json.dump(layers, args.output, indent=2)
175
176def get_recipe(args):
177 with bb.tinfoil.Tinfoil() as tinfoil:
178 tinfoil.logger.setLevel(logger.getEffectiveLevel())
179 tinfoil.prepare(config_only=True)
180
181 recipe = pkg2recipe(tinfoil, args.package)
182 print(' %s package provided by %s' % (args.package, recipe))
183
184def pkg_dependencies(args):
185 def get_recipe_info(tinfoil, recipe):
186 try:
187 info = tinfoil.get_recipe_info(recipe)
188 except Exception:
189 logger.error('Failed to get recipe info for: %s' % recipe)
190 sys.exit(1)
191 if not info:
192 logger.warning('No recipe info found for: %s' % recipe)
193 sys.exit(1)
194 append_files = tinfoil.get_file_appends(info.fn)
195 appends = True
196 data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
197 data.pn = info.pn
198 data.pv = info.pv
199 return data
200
201 def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
202 spaces = ' ' * order
203 data = recipe_info[rn]
204 if args.native:
205 logger.debug('%s- %s' % (spaces, data.pn))
206 elif "-native" not in data.pn:
207 if "cross" not in data.pn:
208 logger.debug('%s- %s' % (spaces, data.pn))
209
210 depends = []
211 for dep in data.depends:
212 if dep not in assume_provided:
213 depends.append(dep)
214
215 # First find all dependencies not in package list.
216 for dep in depends:
217 if dep not in packages:
218 packages.append(dep)
219 dep_data = get_recipe_info(tinfoil, dep)
220 # Do this once now to reduce the number of bitbake calls.
221 dep_data.depends = dep_data.getVar('DEPENDS').split()
222 recipe_info[dep] = dep_data
223
224 # Then recursively analyze all of the dependencies for the current recipe.
225 for dep in depends:
226 find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)
227
228 with bb.tinfoil.Tinfoil() as tinfoil:
229 tinfoil.logger.setLevel(logger.getEffectiveLevel())
230 tinfoil.prepare()
231
232 assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
233 logger.debug('assumed provided:')
234 for ap in sorted(assume_provided):
235 logger.debug(' - %s' % ap)
236
237 recipe = pkg2recipe(tinfoil, args.package)
238 data = get_recipe_info(tinfoil, recipe)
239 data.depends = []
240 depends = data.getVar('DEPENDS').split()
241 for dep in depends:
242 if dep not in assume_provided:
243 data.depends.append(dep)
244
245 recipe_info = dict([(recipe, data)])
246 packages = []
247 find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)
248
249 print('\nThe following packages are required to build %s' % recipe)
250 for p in sorted(packages):
251 data = recipe_info[p]
252 if "-native" not in data.pn:
253 if "cross" not in data.pn:
254 print(" %s (%s)" % (data.pn,p))
255
256 if args.native:
257 print('\nThe following native packages are required to build %s' % recipe)
258 for p in sorted(packages):
259 data = recipe_info[p]
260 if "-native" in data.pn:
261 print(" %s(%s)" % (data.pn,p))
262 if "cross" in data.pn:
263 print(" %s(%s)" % (data.pn,p))
264
265def default_config():
266 vlist = OrderedDict()
267 vlist['PV'] = 'yes'
268 vlist['SUMMARY'] = 'no'
269 vlist['DESCRIPTION'] = 'no'
270 vlist['SECTION'] = 'no'
271 vlist['LICENSE'] = 'yes'
272 vlist['HOMEPAGE'] = 'no'
273 vlist['BUGTRACKER'] = 'no'
274 vlist['PROVIDES'] = 'no'
275 vlist['BBCLASSEXTEND'] = 'no'
276 vlist['DEPENDS'] = 'no'
277 vlist['PACKAGECONFIG'] = 'no'
278 vlist['SRC_URI'] = 'yes'
279 vlist['SRCREV'] = 'yes'
280 vlist['EXTRA_OECONF'] = 'no'
281 vlist['EXTRA_OESCONS'] = 'no'
282 vlist['EXTRA_OECMAKE'] = 'no'
283 vlist['EXTRA_OEMESON'] = 'no'
284
285 clist = OrderedDict()
286 clist['variables'] = vlist
287 clist['filepath'] = 'no'
288 clist['sha256sum'] = 'no'
289 clist['layerdir'] = 'no'
290 clist['layer'] = 'no'
291 clist['inherits'] = 'no'
292 clist['source_urls'] = 'no'
293 clist['packageconfig_opts'] = 'no'
294 clist['patches'] = 'no'
295 clist['packagedir'] = 'no'
296 return clist
297
298def dump_config(args):
299 config = default_config()
300 f = open('default_config.json', 'w')
301 json.dump(config, f, indent=2)
302 logger.info('Default config list dumped to default_config.json')
303
304def export_manifest_info(args):
305
306 def handle_value(value):
307 if value:
308 return oe.utils.squashspaces(value)
309 else:
310 return value
311
312 if args.config:
313 logger.debug('config: %s' % args.config)
314 f = open(args.config, 'r')
315 config = json.load(f, object_pairs_hook=OrderedDict)
316 else:
317 config = default_config()
318 if logger.isEnabledFor(logging.DEBUG):
319 print('Configuration:')
320 json.dump(config, sys.stdout, indent=2)
321 print('')
322
323 tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
324 logger.debug('tmp dir: %s' % tmpoutdir)
325
326 # export manifest
327 shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest"))
328
329 with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
330 tinfoil.logger.setLevel(logger.getEffectiveLevel())
331 tinfoil.prepare(config_only=False)
332
333 pkglist = get_pkg_list(args.manifest)
334 # export pkg list
335 f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
336 for pkg in pkglist:
337 f.write('%s\n' % pkg)
338 f.close()
339
340 recipelist = []
341 for pkg in pkglist:
342 recipe = pkg2recipe(tinfoil,pkg)
343 if recipe:
344 if recipe not in recipelist:
345 recipelist.append(recipe)
346 recipelist.sort()
347 # export recipe list
348 f = open(os.path.join(tmpoutdir, "recipes"), 'w')
349 for recipe in recipelist:
350 f.write('%s\n' % recipe)
351 f.close()
352
353 try:
354 rvalues = OrderedDict()
355 for pn in sorted(recipelist):
356 logger.debug('Package: %s' % pn)
357 rd = tinfoil.parse_recipe(pn)
358
359 rvalues[pn] = OrderedDict()
360
361 for varname in config['variables']:
362 if config['variables'][varname] == 'yes':
363 rvalues[pn][varname] = handle_value(rd.getVar(varname))
364
365 fpth = rd.getVar('FILE')
366 layerdir = oe.recipeutils.find_layerdir(fpth)
367 if config['filepath'] == 'yes':
368 rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
369 if config['sha256sum'] == 'yes':
370 rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)
371
372 if config['layerdir'] == 'yes':
373 rvalues[pn]['layerdir'] = layerdir
374
375 if config['layer'] == 'yes':
376 rvalues[pn]['layer'] = os.path.basename(layerdir)
377
378 if config['inherits'] == 'yes':
379 gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
380 lr = set(rd.getVar("__inherit_cache") or [])
381 rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})
382
383 if config['source_urls'] == 'yes':
384 rvalues[pn]['source_urls'] = []
385 for url in (rd.getVar('SRC_URI') or '').split():
386 if not url.startswith('file://'):
387 url = url.split(';')[0]
388 rvalues[pn]['source_urls'].append(url)
389
390 if config['packageconfig_opts'] == 'yes':
391 rvalues[pn]['packageconfig_opts'] = OrderedDict()
392 for key in rd.getVarFlags('PACKAGECONFIG').keys():
393 if key == 'doc':
394 continue
395 rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
396
397 if config['patches'] == 'yes':
398 patches = oe.recipeutils.get_recipe_patches(rd)
399 rvalues[pn]['patches'] = []
400 if patches:
401 recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
402 bb.utils.mkdirhier(recipeoutdir)
403 for patch in patches:
404 # Patches may be in other layers too
405 patchlayerdir = oe.recipeutils.find_layerdir(patch)
406 # patchlayerdir will be None for remote patches, which we ignore
407 # (since currently they are considered as part of sources)
408 if patchlayerdir:
409 rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
410 shutil.copy(patch, recipeoutdir)
411
412 if config['packagedir'] == 'yes':
413 pn_dir = os.path.join(tmpoutdir, pn)
414 bb.utils.mkdirhier(pn_dir)
415 f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
416 json.dump(rvalues[pn], f, indent=2)
417 f.close()
418
419 with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
420 json.dump(rvalues, f, indent=2)
421
422 if args.output:
423 outname = os.path.basename(args.output)
424 else:
425 outname = os.path.splitext(os.path.basename(args.manifest))[0]
426 if outname.endswith('.tar.gz'):
427 outname = outname[:-7]
428 elif outname.endswith('.tgz'):
429 outname = outname[:-4]
430
431 tarfn = outname
432 if tarfn.endswith(os.sep):
433 tarfn = tarfn[:-1]
434 if not tarfn.endswith(('.tar.gz', '.tgz')):
435 tarfn += '.tar.gz'
436 with open(tarfn, 'wb') as f:
437 with tarfile.open(None, "w:gz", f) as tar:
438 tar.add(tmpoutdir, outname)
439 finally:
440 shutil.rmtree(tmpoutdir)
441
442
443def main():
444 parser = argparse_oe.ArgumentParser(description="Image manifest utility",
445 epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
446 parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
447 parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
448 subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
449 subparsers.required = True
450
451 # get recipe info
452 parser_get_recipes = subparsers.add_parser('recipe-info',
453 help='Get recipe info',
454 description='Get recipe information for a package')
455 parser_get_recipes.add_argument('package', help='Package name')
456 parser_get_recipes.set_defaults(func=get_recipe)
457
458 # list runtime dependencies
459 parser_pkg_dep = subparsers.add_parser('list-depends',
460 help='List dependencies',
461 description='List dependencies required to build the package')
462 parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
463 parser_pkg_dep.add_argument('package', help='Package name')
464 parser_pkg_dep.set_defaults(func=pkg_dependencies)
465
466 # list recipes
467 parser_recipes = subparsers.add_parser('list-recipes',
468 help='List recipes producing packages within an image',
469 description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
470 parser_recipes.add_argument('manifest', help='Manifest file')
471 parser_recipes.set_defaults(func=list_recipes)
472
473 # list packages
474 parser_packages = subparsers.add_parser('list-packages',
475 help='List packages within an image',
476 description='Lists packages that went into an image, using the manifest')
477 parser_packages.add_argument('manifest', help='Manifest file')
478 parser_packages.set_defaults(func=list_packages)
479
480 # list layers
481 parser_layers = subparsers.add_parser('list-layers',
482 help='List included layers',
483 description='Lists included layers')
484 parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
485 default=sys.stdout, type=argparse.FileType('w'))
486 parser_layers.set_defaults(func=list_layers)
487
488 # dump default configuration file
489 parser_dconfig = subparsers.add_parser('dump-config',
490 help='Dump default config',
491 description='Dump default config to default_config.json')
492 parser_dconfig.set_defaults(func=dump_config)
493
494 # export recipe info for packages in manifest
495 parser_export = subparsers.add_parser('manifest-info',
496 help='Export recipe info for a manifest',
497 description='Export recipe information using the manifest')
498 parser_export.add_argument('-c', '--config', help='load config from json file')
499 parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
500 parser_export.add_argument('manifest', help='Manifest file')
501 parser_export.set_defaults(func=export_manifest_info)
502
503 args = parser.parse_args()
504
505 if args.debug:
506 logger.setLevel(logging.DEBUG)
507 logger.debug("Debug Enabled")
508 elif args.quiet:
509 logger.setLevel(logging.ERROR)
510
511 ret = args.func(args)
512
513 return ret
514
515
516if __name__ == "__main__":
517 try:
518 ret = main()
519 except Exception:
520 ret = 1
521 import traceback
522 traceback.print_exc()
523 sys.exit(ret)
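get_pkg_list() distinguishes its two input formats purely by field count; a sketch with hypothetical lines:

    image_manifest_line = "busybox core2_64 1.35.0"   # manifest: name arch version
    depends_file_line = "busybox"                     # build dependency file: name only
    for line in (image_manifest_line, depends_file_line):
        fields = line.split()
        if len(fields) in (1, 3):
            print(fields[0])                          # 'busybox' in both cases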
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py
index d6de4dc84d..bb288e9099 100755
--- a/scripts/contrib/list-packageconfig-flags.py
+++ b/scripts/contrib/list-packageconfig-flags.py
@@ -33,7 +33,7 @@ import bb.tinfoil
33def get_fnlist(bbhandler, pkg_pn, preferred): 33def get_fnlist(bbhandler, pkg_pn, preferred):
34 ''' Get all recipe file names ''' 34 ''' Get all recipe file names '''
35 if preferred: 35 if preferred:
36 (latest_versions, preferred_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn) 36 (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
37 37
38 fn_list = [] 38 fn_list = []
39 for pn in sorted(pkg_pn): 39 for pn in sorted(pkg_pn):
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
index de3862c897..7192113c28 100755
--- a/scripts/contrib/oe-build-perf-report-email.py
+++ b/scripts/contrib/oe-build-perf-report-email.py
@@ -19,8 +19,6 @@ import socket
19import subprocess 19import subprocess
20import sys 20import sys
21import tempfile 21import tempfile
22from email.mime.image import MIMEImage
23from email.mime.multipart import MIMEMultipart
24from email.mime.text import MIMEText 22from email.mime.text import MIMEText
25 23
26 24
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
29log = logging.getLogger('oe-build-perf-report') 27log = logging.getLogger('oe-build-perf-report')
30 28
31 29
32# Find js scaper script
33SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
34 'scrape-html-report.js')
35if not os.path.isfile(SCRAPE_JS):
36 log.error("Unableto find oe-build-perf-report-scrape.js")
37 sys.exit(1)
38
39
40class ReportError(Exception):
41 """Local errors"""
42 pass
43
44
45def check_utils():
46 """Check that all needed utils are installed in the system"""
47 missing = []
48 for cmd in ('phantomjs', 'optipng'):
49 if not shutil.which(cmd):
50 missing.append(cmd)
51 if missing:
52 log.error("The following tools are missing: %s", ' '.join(missing))
53 sys.exit(1)
54
55
56def parse_args(argv): 30def parse_args(argv):
57 """Parse command line arguments""" 31 """Parse command line arguments"""
58 description = """Email build perf test report""" 32 description = """Email build perf test report"""
@@ -77,137 +51,19 @@ def parse_args(argv):
77 "the email parts") 51 "the email parts")
78 parser.add_argument('--text', 52 parser.add_argument('--text',
79 help="Plain text message") 53 help="Plain text message")
80 parser.add_argument('--html',
81 help="HTML peport generated by oe-build-perf-report")
82 parser.add_argument('--phantomjs-args', action='append',
83 help="Extra command line arguments passed to PhantomJS")
84 54
85 args = parser.parse_args(argv) 55 args = parser.parse_args(argv)
86 56
87 if not args.html and not args.text: 57 if not args.text:
88 parser.error("Please specify --html and/or --text") 58 parser.error("Please specify --text")
89 59
90 return args 60 return args
91 61
92 62
93def decode_png(infile, outfile): 63def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
94 """Parse/decode/optimize png data from a html element"""
95 with open(infile) as f:
96 raw_data = f.read()
97
98 # Grab raw base64 data
99 b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1)
100 b64_data = re.sub('">.+$', '', b64_data, 1)
101
102 # Replace file with proper decoded png
103 with open(outfile, 'wb') as f:
104 f.write(base64.b64decode(b64_data))
105
106 subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
107
108
109def mangle_html_report(infile, outfile, pngs):
110 """Mangle html file into a email compatible format"""
111 paste = True
112 png_dir = os.path.dirname(outfile)
113 with open(infile) as f_in:
114 with open(outfile, 'w') as f_out:
115 for line in f_in.readlines():
116 stripped = line.strip()
117 # Strip out scripts
118 if stripped == '<!--START-OF-SCRIPTS-->':
119 paste = False
120 elif stripped == '<!--END-OF-SCRIPTS-->':
121 paste = True
122 elif paste:
123 if re.match('^.+href="data:image/png;base64', stripped):
124 # Strip out encoded pngs (as they're huge in size)
125 continue
126 elif 'www.gstatic.com' in stripped:
127 # HACK: drop references to external static pages
128 continue
129
130 # Replace charts with <img> elements
131 match = re.match('<div id="(?P<id>\w+)"', stripped)
132 if match and match.group('id') in pngs:
133 f_out.write('<img src="cid:{}"\n'.format(match.group('id')))
134 else:
135 f_out.write(line)
136
137
138def scrape_html_report(report, outdir, phantomjs_extra_args=None):
139 """Scrape html report into a format sendable by email"""
140 tmpdir = tempfile.mkdtemp(dir='.')
141 log.debug("Using tmpdir %s for phantomjs output", tmpdir)
142
143 if not os.path.isdir(outdir):
144 os.mkdir(outdir)
145 if os.path.splitext(report)[1] not in ('.html', '.htm'):
146 raise ReportError("Invalid file extension for report, needs to be "
147 "'.html' or '.htm'")
148
149 try:
150 log.info("Scraping HTML report with PhangomJS")
151 extra_args = phantomjs_extra_args if phantomjs_extra_args else []
152 subprocess.check_output(['phantomjs', '--debug=true'] + extra_args +
153 [SCRAPE_JS, report, tmpdir],
154 stderr=subprocess.STDOUT)
155
156 pngs = []
157 images = []
158 for fname in os.listdir(tmpdir):
159 base, ext = os.path.splitext(fname)
160 if ext == '.png':
161 log.debug("Decoding %s", fname)
162 decode_png(os.path.join(tmpdir, fname),
163 os.path.join(outdir, fname))
164 pngs.append(base)
165 images.append(fname)
166 elif ext in ('.html', '.htm'):
167 report_file = fname
168 else:
169 log.warning("Unknown file extension: '%s'", ext)
170 #shutil.move(os.path.join(tmpdir, fname), outdir)
171
172 log.debug("Mangling html report file %s", report_file)
173 mangle_html_report(os.path.join(tmpdir, report_file),
174 os.path.join(outdir, report_file), pngs)
175 return (os.path.join(outdir, report_file),
176 [os.path.join(outdir, i) for i in images])
177 finally:
178 shutil.rmtree(tmpdir)
179
180def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
181 blind_copy=[]):
182 """Send email"""
183 # Generate email message 64 # Generate email message
184 text_msg = html_msg = None 65 with open(text_fn) as f:
185 if text_fn: 66 msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
186 with open(text_fn) as f:
187 text_msg = MIMEText("Yocto build performance test report.\n" +
188 f.read(), 'plain')
189 if html_fn:
190 html_msg = msg = MIMEMultipart('related')
191 with open(html_fn) as f:
192 html_msg.attach(MIMEText(f.read(), 'html'))
193 for img_fn in image_fns:
194 # Expect that content id is same as the filename
195 cid = os.path.splitext(os.path.basename(img_fn))[0]
196 with open(img_fn, 'rb') as f:
197 image_msg = MIMEImage(f.read())
198 image_msg['Content-ID'] = '<{}>'.format(cid)
199 html_msg.attach(image_msg)
200
201 if text_msg and html_msg:
202 msg = MIMEMultipart('alternative')
203 msg.attach(text_msg)
204 msg.attach(html_msg)
205 elif text_msg:
206 msg = text_msg
207 elif html_msg:
208 msg = html_msg
209 else:
210 raise ReportError("Neither plain text nor html body specified")
211 67
212 pw_data = pwd.getpwuid(os.getuid()) 68 pw_data = pwd.getpwuid(os.getuid())
213 full_name = pw_data.pw_gecos.split(',')[0] 69 full_name = pw_data.pw_gecos.split(',')[0]
@@ -234,8 +90,6 @@ def main(argv=None):
234 if args.debug: 90 if args.debug:
235 log.setLevel(logging.DEBUG) 91 log.setLevel(logging.DEBUG)
236 92
237 check_utils()
238
239 if args.outdir: 93 if args.outdir:
240 outdir = args.outdir 94 outdir = args.outdir
241 if not os.path.exists(outdir): 95 if not os.path.exists(outdir):
@@ -245,25 +99,16 @@ def main(argv=None):
245 99
246 try: 100 try:
247 log.debug("Storing email parts in %s", outdir) 101 log.debug("Storing email parts in %s", outdir)
248 html_report = images = None
249 if args.html:
250 html_report, images = scrape_html_report(args.html, outdir,
251 args.phantomjs_args)
252
253 if args.to: 102 if args.to:
254 log.info("Sending email to %s", ', '.join(args.to)) 103 log.info("Sending email to %s", ', '.join(args.to))
255 if args.cc: 104 if args.cc:
256 log.info("Copying to %s", ', '.join(args.cc)) 105 log.info("Copying to %s", ', '.join(args.cc))
257 if args.bcc: 106 if args.bcc:
258 log.info("Blind copying to %s", ', '.join(args.bcc)) 107 log.info("Blind copying to %s", ', '.join(args.bcc))
259 send_email(args.text, html_report, images, args.subject, 108 send_email(args.text, args.subject, args.to, args.cc, args.bcc)
260 args.to, args.cc, args.bcc)
261 except subprocess.CalledProcessError as err: 109 except subprocess.CalledProcessError as err:
262 log.error("%s, with output:\n%s", str(err), err.output.decode()) 110 log.error("%s, with output:\n%s", str(err), err.output.decode())
263 return 1 111 return 1
264 except ReportError as err:
265 log.error(err)
266 return 1
267 finally: 112 finally:
268 if not args.outdir: 113 if not args.outdir:
269 log.debug("Wiping %s", outdir) 114 log.debug("Wiping %s", outdir)
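With the HTML/PhantomJS path gone, send_email() builds a single plain-text MIME part; a minimal sketch under assumed file and header names:

    from email.mime.text import MIMEText

    with open("report.txt") as f:   # hypothetical --text file
        msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
    msg['Subject'] = "build perf results"   # header assembly here is illustrative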
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index 62c509f51c..bceae06561 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -1,14 +1,25 @@
1#! /usr/bin/env python3 1#! /usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
8import argparse
9import collections
10import json
11import os
12import os.path
13import pathlib
14import re
15import subprocess
16
6# TODO 17# TODO
7# - option to just list all broken files 18# - option to just list all broken files
8# - test suite 19# - test suite
9# - validate signed-off-by 20# - validate signed-off-by
10 21
11status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied") 22status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
12 23
13class Result: 24class Result:
14 # Whether the patch has an Upstream-Status or not 25 # Whether the patch has an Upstream-Status or not
@@ -33,20 +44,18 @@ def blame_patch(patch):
33 From a patch filename, return a list of "commit summary (author name <author 44 From a patch filename, return a list of "commit summary (author name <author
34 email>)" strings representing the history. 45 email>)" strings representing the history.
35 """ 46 """
36 import subprocess
37 return subprocess.check_output(("git", "log", 47 return subprocess.check_output(("git", "log",
38 "--follow", "--find-renames", "--diff-filter=A", 48 "--follow", "--find-renames", "--diff-filter=A",
39 "--format=%s (%aN <%aE>)", 49 "--format=%s (%aN <%aE>)",
40 "--", patch)).decode("utf-8").splitlines() 50 "--", patch)).decode("utf-8").splitlines()
41 51
42def patchreview(path, patches): 52def patchreview(patches):
43 import re, os.path
44 53
45 # General pattern: start of line, optional whitespace, tag with optional 54 # General pattern: start of line, optional whitespace, tag with optional
46 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case 55 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
47 # insensitive. 56 # insensitive.
48 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) 57 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
49 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE) 58 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
50 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) 59 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
51 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) 60 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
52 61
@@ -54,11 +63,10 @@ def patchreview(path, patches):
54 63
55 for patch in patches: 64 for patch in patches:
56 65
57 fullpath = os.path.join(path, patch)
58 result = Result() 66 result = Result()
59 results[fullpath] = result 67 results[patch] = result
60 68
61 content = open(fullpath, encoding='ascii', errors='ignore').read() 69 content = open(patch, encoding='ascii', errors='ignore').read()
62 70
63 # Find the Signed-off-by tag 71 # Find the Signed-off-by tag
64 match = sob_re.search(content) 72 match = sob_re.search(content)
@@ -191,29 +199,56 @@ Patches in Pending state: %s""" % (total_patches,
191def histogram(results): 199def histogram(results):
192 from toolz import recipes, dicttoolz 200 from toolz import recipes, dicttoolz
193 import math 201 import math
202
194 counts = recipes.countby(lambda r: r.upstream_status, results.values()) 203 counts = recipes.countby(lambda r: r.upstream_status, results.values())
195 bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) 204 bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
196 for k in bars: 205 for k in bars:
197 print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) 206 print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
198 207
208def find_layers(candidate):
209 # candidate can either be the path to a layer directly (eg meta-intel), or a
210 # repository that contains other layers (meta-arm). We can determine which by
211 # looking for a conf/layer.conf file. If that file exists then it's a layer,
212 # otherwise it's a repository of layers and we can assume they're called
213 # meta-*.
214
215 if (candidate / "conf" / "layer.conf").exists():
216 return [candidate.absolute()]
217 else:
218 return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
219
220# TODO these don't actually handle dynamic-layers/
221
222def gather_patches(layers):
223 patches = []
224 for directory in layers:
225 filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split()
226 patches += [os.path.join(directory, f) for f in filenames]
227 return patches
228
229def count_recipes(layers):
230 count = 0
231 for directory in layers:
232 output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True)
233 count += len(output.splitlines())
234 return count
199 235
200if __name__ == "__main__": 236if __name__ == "__main__":
201 import argparse, subprocess, os
202
203 args = argparse.ArgumentParser(description="Patch Review Tool") 237 args = argparse.ArgumentParser(description="Patch Review Tool")
204 args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") 238 args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
205 args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") 239 args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
206 args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") 240 args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
207 args.add_argument("-j", "--json", help="update JSON") 241 args.add_argument("-j", "--json", help="update JSON")
208 args.add_argument("directory", help="directory to scan") 242 args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
209 args = args.parse_args() 243 args = args.parse_args()
210 244
211 patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split() 245 layers = find_layers(args.directory)
212 results = patchreview(args.directory, patches) 246 print(f"Found layers {' '.join((d.name for d in layers))}")
247 patches = gather_patches(layers)
248 results = patchreview(patches)
213 analyse(results, want_blame=args.blame, verbose=args.verbose) 249 analyse(results, want_blame=args.blame, verbose=args.verbose)
214 250
215 if args.json: 251 if args.json:
216 import json, os.path, collections
217 if os.path.isfile(args.json): 252 if os.path.isfile(args.json):
218 data = json.load(open(args.json)) 253 data = json.load(open(args.json))
219 else: 254 else:
@@ -221,7 +256,11 @@ if __name__ == "__main__":
221 256
222 row = collections.Counter() 257 row = collections.Counter()
223 row["total"] = len(results) 258 row["total"] = len(results)
224 row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip() 259 row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
260 row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
261 row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
262 row['recipe_count'] = count_recipes(layers)
263
225 for r in results.values(): 264 for r in results.values():
226 if r.upstream_status in status_values: 265 if r.upstream_status in status_values:
227 row[r.upstream_status] += 1 266 row[r.upstream_status] += 1
@@ -231,7 +270,7 @@ if __name__ == "__main__":
231 row['malformed-sob'] += 1 270 row['malformed-sob'] += 1
232 271
233 data.append(row) 272 data.append(row)
234 json.dump(data, open(args.json, "w")) 273 json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")
235 274
236 if args.histogram: 275 if args.histogram:
237 print() 276 print()
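The widened status group is the key change for the new state; a quick check of the committed pattern (the input line is hypothetical):

    import re

    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)",
                           re.IGNORECASE | re.MULTILINE)
    m = status_re.search("Upstream-Status: Inactive-Upstream\n")
    print(m.group(2))   # 'Inactive-Upstream'; the old (\w*) stopped at the hyphen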
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 23f238adf6..4012ac7ba7 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then
97 exit 251 97 exit 251
98fi 98fi
99 99
100if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then 100if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended" 101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
102fi 102fi
103 103
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
index 478e8b0d03..a2879d2336 100755
--- a/scripts/contrib/test_build_time_worker.sh
+++ b/scripts/contrib/test_build_time_worker.sh
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# This is an example script to be used in conjunction with test_build_time.sh 7# This is an example script to be used in conjunction with test_build_time.sh
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index 7bffa78e23..a90b5010bc 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# This script can be used to verify HOMEPAGE values for all recipes in 7# This script can be used to verify HOMEPAGE values for all recipes in
diff --git a/scripts/cp-noerror b/scripts/cp-noerror
index ab617c5d35..13a098eee0 100755
--- a/scripts/cp-noerror
+++ b/scripts/cp-noerror
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation, 7# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation,
diff --git a/scripts/create-pull-request b/scripts/create-pull-request
index 8eefcf63a5..885105fab3 100755
--- a/scripts/create-pull-request
+++ b/scripts/create-pull-request
@@ -128,7 +128,7 @@ PROTO_RE="[a-z][a-z+]*://"
128GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)" 128GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)"
129REMOTE_URL=${REMOTE_URL%.git} 129REMOTE_URL=${REMOTE_URL%.git}
130REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#") 130REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#")
131REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#git://\4/\5#") 131REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#https://\4/\5#")
132 132
133if [ -z "$BRANCH" ]; then 133if [ -z "$BRANCH" ]; then
134 BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2) 134 BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2)
@@ -149,13 +149,10 @@ fi
149WEB_URL="" 149WEB_URL=""
150case "$REMOTE_URL" in 150case "$REMOTE_URL" in
151 *git.yoctoproject.org*) 151 *git.yoctoproject.org*)
152 WEB_URL="http://git.yoctoproject.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH" 152 WEB_URL="https://git.yoctoproject.org/$REMOTE_REPO/log/?h=$BRANCH"
153 ;;
154 *git.pokylinux.org*)
155 WEB_URL="http://git.pokylinux.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH"
156 ;; 153 ;;
157 *git.openembedded.org*) 154 *git.openembedded.org*)
158 WEB_URL="http://cgit.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH" 155 WEB_URL="https://git.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH"
159 ;; 156 ;;
160 *github.com*) 157 *github.com*)
161 WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH" 158 WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH"
diff --git a/scripts/cross-intercept/ar b/scripts/cross-intercept/ar
new file mode 120000
index 0000000000..bc68ffd7a2
--- /dev/null
+++ b/scripts/cross-intercept/ar
@@ -0,0 +1 @@
../native-intercept/ar \ No newline at end of file
diff --git a/scripts/crosstap b/scripts/crosstap
index 40856bc208..5aa72f14d4 100755
--- a/scripts/crosstap
+++ b/scripts/crosstap
@@ -353,7 +353,7 @@ bitbake workspace.
353 353
354Anything after -- option is passed directly to stap. 354Anything after -- option is passed directly to stap.
355 355
356Legacy script invocation style supported but depreciated: 356Legacy script invocation style supported but deprecated:
357 %prog <user@hostname> <sytemtap-script> [systemtap options] 357 %prog <user@hostname> <sytemtap-script> [systemtap options]
358 358
359To enable most out of systemtap the following site.conf or local.conf 359To enable most out of systemtap the following site.conf or local.conf
@@ -365,13 +365,13 @@ IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
365USER_CLASSES += "image-combined-dbg" 365USER_CLASSES += "image-combined-dbg"
366 366
367# enables kernel debug symbols 367# enables kernel debug symbols
368KERNEL_EXTRA_FEATURES_append = " features/debug/debug-kernel.scc" 368KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
369 369
370# minimal, just run-time systemtap configuration in target image 370# minimal, just run-time systemtap configuration in target image
371PACKAGECONFIG_pn-systemtap = "monitor" 371PACKAGECONFIG:pn-systemtap = "monitor"
372 372
373# add systemtap run-time into target image if it is not there yet 373# add systemtap run-time into target image if it is not there yet
374IMAGE_INSTALL_append = " systemtap" 374IMAGE_INSTALL:append = " systemtap"
375""" 375"""
376 option_parser = optparse.OptionParser(usage=usage) 376 option_parser = optparse.OptionParser(usage=usage)
377 377
diff --git a/scripts/devtool b/scripts/devtool
index 8a4f41bc37..60ea3e8298 100755
--- a/scripts/devtool
+++ b/scripts/devtool
@@ -100,10 +100,11 @@ def read_workspace():
100 _enable_workspace_layer(config.workspace_path, config, basepath) 100 _enable_workspace_layer(config.workspace_path, config, basepath)
101 101
102 logger.debug('Reading workspace in %s' % config.workspace_path) 102 logger.debug('Reading workspace in %s' % config.workspace_path)
103 externalsrc_re = re.compile(r'^EXTERNALSRC(_pn-([^ =]+))? *= *"([^"]*)"$') 103 externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$')
104 for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')): 104 for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')):
105 with open(fn, 'r') as f: 105 with open(fn, 'r') as f:
106 pnvalues = {} 106 pnvalues = {}
107 pn = None
107 for line in f: 108 for line in f:
108 res = externalsrc_re.match(line.rstrip()) 109 res = externalsrc_re.match(line.rstrip())
109 if res: 110 if res:
@@ -123,6 +124,9 @@ def read_workspace():
123 elif line.startswith('# srctreebase: '): 124 elif line.startswith('# srctreebase: '):
124 pnvalues['srctreebase'] = line.split(':', 1)[1].strip() 125 pnvalues['srctreebase'] = line.split(':', 1)[1].strip()
125 if pnvalues: 126 if pnvalues:
127 if not pn:
128 raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. "
129 "Maybe still using old syntax?" % config.workspace_path)
126 if not pnvalues.get('srctreebase', None): 130 if not pnvalues.get('srctreebase', None):
127 pnvalues['srctreebase'] = pnvalues['srctree'] 131 pnvalues['srctreebase'] = pnvalues['srctree']
128 logger.debug('Found recipe %s' % pnvalues) 132 logger.debug('Found recipe %s' % pnvalues)
@@ -133,17 +137,27 @@ def create_workspace(args, config, basepath, workspace):
133 workspacedir = os.path.abspath(args.layerpath) 137 workspacedir = os.path.abspath(args.layerpath)
134 else: 138 else:
135 workspacedir = os.path.abspath(os.path.join(basepath, 'workspace')) 139 workspacedir = os.path.abspath(os.path.join(basepath, 'workspace'))
136 _create_workspace(workspacedir, config, basepath) 140 layerseries = None
141 if args.layerseries:
142 layerseries = args.layerseries
143 _create_workspace(workspacedir, config, basepath, layerseries)
137 if not args.create_only: 144 if not args.create_only:
138 _enable_workspace_layer(workspacedir, config, basepath) 145 _enable_workspace_layer(workspacedir, config, basepath)
139 146
140def _create_workspace(workspacedir, config, basepath): 147def _create_workspace(workspacedir, config, basepath, layerseries=None):
141 import bb 148 import bb
142 149
143 confdir = os.path.join(workspacedir, 'conf') 150 confdir = os.path.join(workspacedir, 'conf')
144 if os.path.exists(os.path.join(confdir, 'layer.conf')): 151 if os.path.exists(os.path.join(confdir, 'layer.conf')):
145 logger.info('Specified workspace already set up, leaving as-is') 152 logger.info('Specified workspace already set up, leaving as-is')
146 else: 153 else:
154 if not layerseries:
155 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
156 try:
157 layerseries = tinfoil.config_data.getVar('LAYERSERIES_CORENAMES')
158 finally:
159 tinfoil.shutdown()
160
147 # Add a config file 161 # Add a config file
148 bb.utils.mkdirhier(confdir) 162 bb.utils.mkdirhier(confdir)
149 with open(os.path.join(confdir, 'layer.conf'), 'w') as f: 163 with open(os.path.join(confdir, 'layer.conf'), 'w') as f:
@@ -155,7 +169,7 @@ def _create_workspace(workspacedir, config, basepath):
155 f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n') 169 f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n')
156 f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n') 170 f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n')
157 f.write('BBFILE_PRIORITY_workspacelayer = "99"\n') 171 f.write('BBFILE_PRIORITY_workspacelayer = "99"\n')
158 f.write('LAYERSERIES_COMPAT_workspacelayer = "${LAYERSERIES_COMPAT_core}"\n') 172 f.write('LAYERSERIES_COMPAT_workspacelayer = "%s"\n' % layerseries)
159 # Add a README file 173 # Add a README file
160 with open(os.path.join(workspacedir, 'README'), 'w') as f: 174 with open(os.path.join(workspacedir, 'README'), 'w') as f:
161 f.write('This layer was created by the OpenEmbedded devtool utility in order to\n') 175 f.write('This layer was created by the OpenEmbedded devtool utility in order to\n')
@@ -285,8 +299,9 @@ def main():
285 return 2 299 return 2
286 300
287 # Search BBPATH first to allow layers to override plugins in scripts_path 301 # Search BBPATH first to allow layers to override plugins in scripts_path
288 for path in global_args.bbpath.split(':') + [scripts_path]: 302 pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]]
289 pluginpath = os.path.join(path, 'lib', 'devtool') 303 context.pluginpaths = pluginpaths
304 for pluginpath in pluginpaths:
290 scriptutils.load_plugins(logger, plugins, pluginpath) 305 scriptutils.load_plugins(logger, plugins, pluginpath)
291 306
292 subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>') 307 subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
@@ -305,6 +320,7 @@ def main():
305 description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.', 320 description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.',
306 group='advanced') 321 group='advanced')
307 parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created') 322 parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created')
323 parser_create_workspace.add_argument('--layerseries', help='Layer series the workspace should be set to be compatible with')
308 parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration') 324 parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration')
309 parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True) 325 parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True)
310 326
@@ -314,10 +330,10 @@ def main():
314 330
315 args = parser.parse_args(unparsed_args, namespace=global_args) 331 args = parser.parse_args(unparsed_args, namespace=global_args)
316 332
317 if not getattr(args, 'no_workspace', False):
318 read_workspace()
319
320 try: 333 try:
334 if not getattr(args, 'no_workspace', False):
335 read_workspace()
336
321 ret = args.func(args, config, basepath, workspace) 337 ret = args.func(args, config, basepath, workspace)
322 except DevtoolError as err: 338 except DevtoolError as err:
323 if str(err): 339 if str(err):
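The updated EXTERNALSRC pattern tracks the new override syntax; a quick check against a hypothetical workspace append line:

    import re

    externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$')
    m = externalsrc_re.match('EXTERNALSRC:pn-zlib = "/work/sources/zlib"')
    print(m.group(2), m.group(3))   # zlib /work/sources/zlib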
diff --git a/scripts/esdk-tools/devtool b/scripts/esdk-tools/devtool
new file mode 120000
index 0000000000..176a01ca68
--- /dev/null
+++ b/scripts/esdk-tools/devtool
@@ -0,0 +1 @@
../devtool \ No newline at end of file
diff --git a/scripts/esdk-tools/oe-find-native-sysroot b/scripts/esdk-tools/oe-find-native-sysroot
new file mode 120000
index 0000000000..d3493f3310
--- /dev/null
+++ b/scripts/esdk-tools/oe-find-native-sysroot
@@ -0,0 +1 @@
../oe-find-native-sysroot \ No newline at end of file
diff --git a/scripts/esdk-tools/recipetool b/scripts/esdk-tools/recipetool
new file mode 120000
index 0000000000..60a95dd936
--- /dev/null
+++ b/scripts/esdk-tools/recipetool
@@ -0,0 +1 @@
../recipetool \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu b/scripts/esdk-tools/runqemu
new file mode 120000
index 0000000000..ae7e7ad7c2
--- /dev/null
+++ b/scripts/esdk-tools/runqemu
@@ -0,0 +1 @@
../runqemu \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-addptable2image b/scripts/esdk-tools/runqemu-addptable2image
new file mode 120000
index 0000000000..afcd00e79d
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-addptable2image
@@ -0,0 +1 @@
../runqemu-addptable2image \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-export-rootfs b/scripts/esdk-tools/runqemu-export-rootfs
new file mode 120000
index 0000000000..a26fcf6110
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-export-rootfs
@@ -0,0 +1 @@
../runqemu-export-rootfs \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-extract-sdk b/scripts/esdk-tools/runqemu-extract-sdk
new file mode 120000
index 0000000000..cc858aaad5
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-extract-sdk
@@ -0,0 +1 @@
../runqemu-extract-sdk \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-gen-tapdevs b/scripts/esdk-tools/runqemu-gen-tapdevs
new file mode 120000
index 0000000000..dbdf79134c
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-gen-tapdevs
@@ -0,0 +1 @@
../runqemu-gen-tapdevs \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-ifdown b/scripts/esdk-tools/runqemu-ifdown
new file mode 120000
index 0000000000..0097693ca3
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-ifdown
@@ -0,0 +1 @@
../runqemu-ifdown \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-ifup b/scripts/esdk-tools/runqemu-ifup
new file mode 120000
index 0000000000..41026d2c0a
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-ifup
@@ -0,0 +1 @@
+../runqemu-ifup
\ No newline at end of file
diff --git a/scripts/esdk-tools/wic b/scripts/esdk-tools/wic
new file mode 120000
index 0000000000..a9d908aa25
--- /dev/null
+++ b/scripts/esdk-tools/wic
@@ -0,0 +1 @@
+../wic
\ No newline at end of file
diff --git a/scripts/gen-lockedsig-cache b/scripts/gen-lockedsig-cache
index cd8f9a4356..023015ec41 100755
--- a/scripts/gen-lockedsig-cache
+++ b/scripts/gen-lockedsig-cache
@@ -1,5 +1,8 @@
 #!/usr/bin/env python3
 #
+#
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -94,8 +97,7 @@ print("Gathering file list took %.1fs" % elapsed)
 print('Processing files')
 for f in files:
     sys.stdout.write('Processing %s... ' % f)
-    _, ext = os.path.splitext(f)
-    if not ext in ['.tgz', '.siginfo', '.sig']:
+    if not f.endswith(('.tar.zst', '.siginfo', '.sig')):
         # Most likely a temp file, skip it
         print('skipping')
         continue
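A note on why the endswith() rewrite is needed (illustrative snippet, not part of the patch): os.path.splitext() only ever returns the final suffix, so it cannot match the compound '.tar.zst' extension used by current sstate archives.

    import os

    fname = 'sstate-cache-example.tar.zst'        # hypothetical file name
    _, ext = os.path.splitext(fname)
    print(ext)                                    # '.zst' - the old check would skip this file
    print(fname.endswith(('.tar.zst', '.siginfo', '.sig')))   # True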
diff --git a/scripts/git b/scripts/git
new file mode 100755
index 0000000000..689adbf9dd
--- /dev/null
+++ b/scripts/git
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+# Wrapper around 'git' that doesn't think we are root
+
+import os
+import shutil
+import sys
+
+os.environ['PSEUDO_UNLOAD'] = '1'
+
+# calculate path to the real 'git'
+path = os.environ['PATH']
+# we need to remove our path but also any other copy of this script which
+# may be present, e.g. eSDK.
+replacements = [os.path.dirname(sys.argv[0])]
+for p in path.split(":"):
+    if p.endswith("/scripts"):
+        replacements.append(p)
+for r in replacements:
+    path = path.replace(r, '/ignoreme')
+real_git = shutil.which('git', path=path)
+
+if len(sys.argv) == 1:
+    os.execl(real_git, 'git')
+
+os.execv(real_git, sys.argv)
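The wrapper's trick is to mask every scripts directory out of PATH before asking shutil.which() for the real binary; a small standalone illustration (paths invented):

    import shutil

    path = '/home/user/poky/scripts:/usr/local/bin:/usr/bin'
    for r in ['/home/user/poky/scripts']:
        path = path.replace(r, '/ignoreme')       # '/ignoreme' never exists
    print(path)                                   # '/ignoreme:/usr/local/bin:/usr/bin'
    print(shutil.which('git', path=path))         # e.g. '/usr/bin/git', never the wrapper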
diff --git a/scripts/install-buildtools b/scripts/install-buildtools
index 8554a5db67..2218f3ffac 100755
--- a/scripts/install-buildtools
+++ b/scripts/install-buildtools
@@ -57,9 +57,9 @@ logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
 
 DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
 DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto'
-DEFAULT_RELEASE = 'yocto-3.2_M3'
-DEFAULT_INSTALLER_VERSION = '3.1+snapshot'
-DEFAULT_BUILDDATE = '20200923'
+DEFAULT_RELEASE = 'yocto-4.1'
+DEFAULT_INSTALLER_VERSION = '4.1'
+DEFAULT_BUILDDATE = '202110XX'
 
 # Python version sanity check
 if not (sys.version_info.major == 3 and sys.version_info.minor >= 4):
@@ -154,6 +154,8 @@ def main():
     group.add_argument('--without-extended-buildtools', action='store_false',
                        dest='with_extended_buildtools',
                        help='disable extended buildtools (traditional buildtools tarball)')
+    group.add_argument('--make-only', action='store_true',
+                       help='only install make tarball')
     group = parser.add_mutually_exclusive_group()
     group.add_argument('-c', '--check', help='enable checksum validation',
                        default=True, action='store_true')
@@ -170,6 +172,9 @@ def main():
 
     args = parser.parse_args()
 
+    if args.make_only:
+        args.with_extended_buildtools = False
+
     if args.debug:
         logger.setLevel(logging.DEBUG)
     elif args.quiet:
@@ -197,7 +202,10 @@ def main():
             if not args.build_date:
                 logger.error("Milestone installers require --build-date")
             else:
-                if args.with_extended_buildtools:
+                if args.make_only:
+                    filename = "%s-buildtools-make-nativesdk-standalone-%s-%s.sh" % (
+                        arch, args.installer_version, args.build_date)
+                elif args.with_extended_buildtools:
                     filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
                         arch, args.installer_version, args.build_date)
                 else:
@@ -207,6 +215,8 @@ def main():
             buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
     # regular release SDK
     else:
+        if args.make_only:
+            filename = "%s-buildtools-make-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
         if args.with_extended_buildtools:
             filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
         else:
@@ -303,7 +313,9 @@ def main():
         if args.with_extended_buildtools and not m:
             logger.info("Ignoring --with-extended-buildtools as filename "
                         "does not contain 'extended'")
-        if args.with_extended_buildtools and m:
+        if args.make_only:
+            tool = 'make'
+        elif args.with_extended_buildtools and m:
             tool = 'gcc'
         else:
             tool = 'tar'
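For reference, the installer filename now resolves as follows for a regular release; this is a sketch of the selection logic with example values, written with elif to make the intended precedence explicit:

    arch, version = 'x86_64', '4.1'               # example values
    make_only, with_extended = True, False        # --make-only forces the latter off
    if make_only:
        filename = '%s-buildtools-make-nativesdk-standalone-%s.sh' % (arch, version)
    elif with_extended:
        filename = '%s-buildtools-extended-nativesdk-standalone-%s.sh' % (arch, version)
    else:
        filename = '%s-buildtools-nativesdk-standalone-%s.sh' % (arch, version)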
diff --git a/scripts/lib/argparse_oe.py b/scripts/lib/argparse_oe.py
index 94a4ac5011..176b732bbc 100644
--- a/scripts/lib/argparse_oe.py
+++ b/scripts/lib/argparse_oe.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py
index 4e8e2a8a93..ab77424cc7 100644
--- a/scripts/lib/build_perf/report.py
+++ b/scripts/lib/build_perf/report.py
@@ -4,7 +4,8 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """Handling of build perf test reports"""
-from collections import OrderedDict, Mapping, namedtuple
+from collections import OrderedDict, namedtuple
+from collections.abc import Mapping
 from datetime import datetime, timezone
 from numbers import Number
 from statistics import mean, stdev, variance
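Background for this one-line move (a standard-library fact, independent of the patch): the container ABCs were only aliased in collections for backwards compatibility, and the aliases were removed in Python 3.10, so the old import fails there.

    from collections.abc import Mapping    # correct on all supported versions
    # from collections import Mapping      # ImportError on Python 3.10+

    print(isinstance({'a': 1}, Mapping))   # True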
diff --git a/scripts/lib/buildstats.py b/scripts/lib/buildstats.py
index c69b5bf4d7..6db60d5bcf 100644
--- a/scripts/lib/buildstats.py
+++ b/scripts/lib/buildstats.py
@@ -8,7 +8,7 @@ import json
 import logging
 import os
 import re
-from collections import namedtuple,OrderedDict
+from collections import namedtuple
 from statistics import mean
 
 
@@ -79,8 +79,8 @@ class BSTask(dict):
         return self['rusage']['ru_oublock']
 
     @classmethod
-    def from_file(cls, buildstat_file):
-        """Read buildstat text file"""
+    def from_file(cls, buildstat_file, fallback_end=0):
+        """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing."""
         bs_task = cls()
         log.debug("Reading task buildstats from %s", buildstat_file)
         end_time = None
@@ -108,7 +108,10 @@
                         bs_task[ru_type][ru_key] = val
             elif key == 'Status':
                 bs_task['status'] = val
-        if end_time is not None and start_time is not None:
+        # If the task didn't finish, fill in the fallback end time if specified
+        if start_time and not end_time and fallback_end:
+            end_time = fallback_end
+        if start_time and end_time:
             bs_task['elapsed_time'] = end_time - start_time
         else:
             raise BSError("{} looks like a invalid buildstats file".format(buildstat_file))
@@ -226,25 +229,44 @@ class BuildStats(dict):
             epoch = match.group('epoch')
         return name, epoch, version, revision
 
+    @staticmethod
+    def parse_top_build_stats(path):
+        """
+        Parse the top-level build_stats file for build-wide start and duration.
+        """
+        start = elapsed = 0
+        with open(path) as fobj:
+            for line in fobj.readlines():
+                key, val = line.split(':', 1)
+                val = val.strip()
+                if key == 'Build Started':
+                    start = float(val)
+                elif key == "Elapsed time":
+                    elapsed = float(val.split()[0])
+        return start, elapsed
+
     @classmethod
     def from_dir(cls, path):
         """Load buildstats from a buildstats directory"""
-        if not os.path.isfile(os.path.join(path, 'build_stats')):
+        top_stats = os.path.join(path, 'build_stats')
+        if not os.path.isfile(top_stats):
             raise BSError("{} does not look like a buildstats directory".format(path))
 
         log.debug("Reading buildstats directory %s", path)
-
         buildstats = cls()
+        build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats)
+        build_end = build_started + build_elapsed
+
         subdirs = os.listdir(path)
         for dirname in subdirs:
             recipe_dir = os.path.join(path, dirname)
-            if not os.path.isdir(recipe_dir):
+            if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir):
                 continue
             name, epoch, version, revision = cls.split_nevr(dirname)
             bsrecipe = BSRecipe(name, epoch, version, revision)
             for task in os.listdir(recipe_dir):
                 bsrecipe.tasks[task] = BSTask.from_file(
-                    os.path.join(recipe_dir, task))
+                    os.path.join(recipe_dir, task), build_end)
             if name in buildstats:
                 raise BSError("Cannot handle multiple versions of the same "
                               "package ({})".format(name))
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index fe545607bb..62ecdfe390 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -16,6 +16,7 @@ class LayerType(Enum):
     BSP = 0
     DISTRO = 1
     SOFTWARE = 2
+    CORE = 3
     ERROR_NO_LAYER_CONF = 98
     ERROR_BSP_DISTRO = 99
 
@@ -43,7 +44,7 @@ def _get_layer_collections(layer_path, lconf=None, data=None):
 
     ldata.setVar('LAYERDIR', layer_path)
     try:
-        ldata = bb.parse.handle(lconf, ldata, include=True)
+        ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
     except:
         raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
     ldata.expandVarref('LAYERDIR')
@@ -106,7 +107,13 @@ def _detect_layer(layer_path):
     if distros:
         is_distro = True
 
-    if is_bsp and is_distro:
+    layer['collections'] = _get_layer_collections(layer['path'])
+
+    if layer_name == "meta" and "core" in layer['collections']:
+        layer['type'] = LayerType.CORE
+        layer['conf']['machines'] = machines
+        layer['conf']['distros'] = distros
+    elif is_bsp and is_distro:
         layer['type'] = LayerType.ERROR_BSP_DISTRO
     elif is_bsp:
         layer['type'] = LayerType.BSP
@@ -117,8 +124,6 @@ def _detect_layer(layer_path):
     else:
         layer['type'] = LayerType.SOFTWARE
 
-    layer['collections'] = _get_layer_collections(layer['path'])
-
     return layer
 
 def detect_layers(layer_directories, no_auto):
@@ -146,7 +151,7 @@ def detect_layers(layer_directories, no_auto):
 
     return layers
 
-def _find_layer_depends(depend, layers):
+def _find_layer(depend, layers):
     for layer in layers:
         if 'collections' not in layer:
             continue
@@ -156,7 +161,28 @@ def _find_layer_depends(depend, layers):
         return layer
     return None
 
-def add_layer_dependencies(bblayersconf, layer, layers, logger):
+def sanity_check_layers(layers, logger):
+    """
+    Check that we didn't find duplicate collection names, as the layer that will
+    be used is non-deterministic. The precise check is duplicate collections
+    with different patterns, as the same pattern being repeated won't cause
+    problems.
+    """
+    import collections
+
+    passed = True
+    seen = collections.defaultdict(set)
+    for layer in layers:
+        for name, data in layer.get("collections", {}).items():
+            seen[name].add(data["pattern"])
+
+    for name, patterns in seen.items():
+        if len(patterns) > 1:
+            passed = False
+            logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns)))
+    return passed
+
+def get_layer_dependencies(layer, layers, logger):
     def recurse_dependencies(depends, layer, layers, logger, ret = []):
         logger.debug('Processing dependencies %s for layer %s.' % \
                      (depends, layer['name']))
@@ -166,7 +192,7 @@ def add_layer_dependencies(bblayersconf, layer, layers, logger):
             if depend == 'core':
                 continue
 
-            layer_depend = _find_layer_depends(depend, layers)
+            layer_depend = _find_layer(depend, layers)
             if not layer_depend:
                 logger.error('Layer %s depends on %s and isn\'t found.' % \
                              (layer['name'], depend))
@@ -203,6 +229,11 @@ def add_layer_dependencies(bblayersconf, layer, layers, logger):
     layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends)
 
     # Note: [] (empty) is allowed, None is not!
+    return layer_depends
+
+def add_layer_dependencies(bblayersconf, layer, layers, logger):
+
+    layer_depends = get_layer_dependencies(layer, layers, logger)
     if layer_depends is None:
         return False
     else:
@@ -256,7 +287,7 @@ def check_command(error_msg, cmd, cwd=None):
         raise RuntimeError(msg)
     return output
 
-def get_signatures(builddir, failsafe=False, machine=None):
+def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
     import re
 
     # some recipes needs to be excluded like meta-world-pkgdata
@@ -267,13 +298,16 @@ def get_signatures(builddir, failsafe=False, machine=None):
     sigs = {}
     tune2tasks = {}
 
-    cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
+    cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
+    if extravars:
+        cmd += extravars
+    cmd += ' '
     if machine:
         cmd += 'MACHINE=%s ' % machine
     cmd += 'bitbake '
     if failsafe:
         cmd += '-k '
-    cmd += '-S none world'
+    cmd += '-S lockedsigs world'
     sigs_file = os.path.join(builddir, 'locked-sigs.inc')
     if os.path.exists(sigs_file):
         os.unlink(sigs_file)
@@ -290,8 +324,8 @@ def get_signatures(builddir, failsafe=False, machine=None):
     else:
         raise
 
-    sig_regex = re.compile("^(?P<task>.*:.*):(?P<hash>.*) .$")
-    tune_regex = re.compile("(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
+    sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
+    tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
     current_tune = None
     with open(sigs_file, 'r') as f:
         for line in f.readlines():
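What sanity_check_layers() actually flags is the same collection name registered with different patterns; a self-contained illustration with invented layer data:

    import collections

    layers = [
        {'collections': {'core': {'pattern': '^meta$'}}},
        {'collections': {'core': {'pattern': '^meta-fork$'}}},   # same name, new pattern
    ]
    seen = collections.defaultdict(set)
    for layer in layers:
        for name, data in layer.get('collections', {}).items():
            seen[name].add(data['pattern'])
    print({n: p for n, p in seen.items() if len(p) > 1})
    # {'core': {'^meta$', '^meta-fork$'}} -> the check fails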
diff --git a/scripts/lib/checklayer/cases/bsp.py b/scripts/lib/checklayer/cases/bsp.py
index 7fd56f5d36..b76163fb56 100644
--- a/scripts/lib/checklayer/cases/bsp.py
+++ b/scripts/lib/checklayer/cases/bsp.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase
 class BSPCheckLayer(OECheckLayerTestCase):
     @classmethod
     def setUpClass(self):
-        if self.tc.layer['type'] != LayerType.BSP:
+        if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE):
             raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\
                 self.tc.layer['name'])
 
@@ -153,7 +153,7 @@ class BSPCheckLayer(OECheckLayerTestCase):
             # do_build can be ignored: it is know to have
             # different signatures in some cases, for example in
             # the allarch ca-certificates due to RDEPENDS=openssl.
-            # That particular dependency is whitelisted via
+            # That particular dependency is marked via
             # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
             # in the sstate signature hash because filtering it
             # out would be hard and running do_build multiple
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index b82304e361..97b16f78c8 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -6,15 +6,19 @@
 import glob
 import os
 import unittest
+import re
 from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
 from checklayer.case import OECheckLayerTestCase
 
 class CommonCheckLayer(OECheckLayerTestCase):
     def test_readme(self):
+        if self.tc.layer['type'] == LayerType.CORE:
+            raise unittest.SkipTest("Core layer's README is top level")
+
         # The top-level README file may have a suffix (like README.rst or README.txt).
         readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
         self.assertTrue(len(readme_files) > 0,
-                        msg="Layer doesn't contains README file.")
+                        msg="Layer doesn't contain a README file.")
 
         # There might be more than one file matching the file pattern above
         # (for example, README.rst and README-COPYING.rst). The one with the shortest
@@ -26,6 +30,16 @@ class CommonCheckLayer(OECheckLayerTestCase):
         self.assertTrue(data,
                         msg="Layer contains a README file but it is empty.")
 
+        # If a layer's README references another README, then the checks below are not valid
+        if re.search('README', data, re.IGNORECASE):
+            return
+
+        self.assertIn('maintainer', data.lower())
+        self.assertIn('patch', data.lower())
+        # Check that there is an email address in the README
+        email_regex = re.compile(r"[^@]+@[^@]+")
+        self.assertTrue(email_regex.match(data))
+
     def test_parse(self):
         check_command('Layer %s failed to parse.' % self.tc.layer['name'],
                       'bitbake -p')
@@ -43,6 +57,36 @@ class CommonCheckLayer(OECheckLayerTestCase):
         '''
         get_signatures(self.td['builddir'], failsafe=False)
 
+    def test_world_inherit_class(self):
+        '''
+        This also does "bitbake -S none world" along with inheriting "yocto-check-layer"
+        class, which can do additional per-recipe test cases.
+        '''
+        msg = []
+        try:
+            get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"')
+        except RuntimeError as ex:
+            msg.append(str(ex))
+        if msg:
+            msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \
+                self.tc.layer['name'])
+            self.fail('\n'.join(msg))
+
+    @unittest.expectedFailure
+    def test_patches_upstream_status(self):
+        import sys
+        sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
+        import oe.qa
+        patches = []
+        for dirpath, dirs, files in os.walk(self.tc.layer['path']):
+            for filename in files:
+                if filename.endswith(".patch"):
+                    ppath = os.path.join(dirpath, filename)
+                    if oe.qa.check_upstream_status(ppath):
+                        patches.append(ppath)
+        self.assertEqual(len(patches), 0, \
+            msg="Found following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches]))
+
     def test_signatures(self):
         if self.tc.layer['type'] == LayerType.SOFTWARE and \
            not self.tc.test_software_layer_signatures:
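The new README checks are deliberately loose heuristics; a quick demonstration of what they accept (sample text invented):

    import re

    data = 'Maintainer: Jane Doe <jane@example.com>\nPlease send patches to the list.'
    print('maintainer' in data.lower())                  # True
    print('patch' in data.lower())                       # True
    print(bool(re.compile(r"[^@]+@[^@]+").match(data)))  # True - something email-shaped exists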
diff --git a/scripts/lib/checklayer/cases/distro.py b/scripts/lib/checklayer/cases/distro.py
index f0bee5493c..a35332451c 100644
--- a/scripts/lib/checklayer/cases/distro.py
+++ b/scripts/lib/checklayer/cases/distro.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase
 class DistroCheckLayer(OECheckLayerTestCase):
     @classmethod
     def setUpClass(self):
-        if self.tc.layer['type'] != LayerType.DISTRO:
+        if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE):
             raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\
                 self.tc.layer['name'])
 
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 702db669de..6133c1c5b4 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs):
78 """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" 78 """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
79 # Grab the command and check it actually exists 79 # Grab the command and check it actually exists
80 fakerootcmd = d.getVar('FAKEROOTCMD') 80 fakerootcmd = d.getVar('FAKEROOTCMD')
81 fakerootenv = d.getVar('FAKEROOTENV')
82 exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, kwargs)
83
84def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
81 if not os.path.exists(fakerootcmd): 85 if not os.path.exists(fakerootcmd):
82 logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') 86 logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built')
83 return 2 87 return 2
84 # Set up the appropriate environment 88 # Set up the appropriate environment
85 newenv = dict(os.environ) 89 newenv = dict(os.environ)
86 fakerootenv = d.getVar('FAKEROOTENV')
87 for varvalue in fakerootenv.split(): 90 for varvalue in fakerootenv.split():
88 if '=' in varvalue: 91 if '=' in varvalue:
89 splitval = varvalue.split('=', 1) 92 splitval = varvalue.split('=', 1)
@@ -233,6 +236,28 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
233 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) 236 bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
234 bb.process.run('git tag -f %s' % basetag, cwd=repodir) 237 bb.process.run('git tag -f %s' % basetag, cwd=repodir)
235 238
239 # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
240 # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
241 stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
242 found = False
243 for line in stdout.splitlines():
244 if line.endswith("/"):
245 new_dir = line.split()[1]
246 for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
247 if ".git" in dirs + files:
248 (stdout, _) = bb.process.run('git remote', cwd=root)
249 remote = stdout.splitlines()[0]
250 (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
251 remote_url = stdout.splitlines()[0]
252 logger.error(os.path.relpath(os.path.join(root, ".."), root))
253 bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
254 found = True
255 if found:
256 oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
257 found = False
258 if os.path.exists(os.path.join(repodir, '.gitmodules')):
259 bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
260
236def recipe_to_append(recipefile, config, wildcard=False): 261def recipe_to_append(recipefile, config, wildcard=False):
237 """ 262 """
238 Convert a recipe file to a bbappend file path within the workspace. 263 Convert a recipe file to a bbappend file path within the workspace.
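The point of the exec_fakeroot split is that callers which already know FAKEROOTCMD and FAKEROOTENV no longer need a datastore; conceptually the new helper reduces to this simplified sketch (error handling omitted):

    import os
    import subprocess

    def run_under_pseudo(fakerootcmd, fakerootenv, cmd):
        newenv = dict(os.environ)
        # FAKEROOTENV is a space-separated list like 'PSEUDO_PREFIX=... PSEUDO_LOCALSTATEDIR=...'
        for varvalue in fakerootenv.split():
            if '=' in varvalue:
                var, value = varvalue.split('=', 1)
                newenv[var] = value
        return subprocess.call('%s %s' % (fakerootcmd, cmd), env=newenv, shell=True)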
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
index 9388abbacf..980f90ddd6 100644
--- a/scripts/lib/devtool/build_image.py
+++ b/scripts/lib/devtool/build_image.py
@@ -113,7 +113,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
         with open(appendfile, 'w') as afile:
             if packages:
                 # include packages from workspace recipes into the image
-                afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
+                afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
             if not task:
                 logger.info('Building image %s with the following '
                             'additional packages: %s', image, ' '.join(packages))
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 6fe02fff2a..1cd4831d2b 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -13,7 +13,7 @@ import shutil
 import errno
 import sys
 import tempfile
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
+from devtool import DevtoolError
 from devtool import build_image
 
 logger = logging.getLogger('devtool')
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index e5af2c95ae..b5ca8f2c2f 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -16,7 +16,7 @@ import bb.utils
 import argparse_oe
 import oe.types
 
-from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
 
@@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
 
     return '\n'.join(lines)
 
-
-
 def deploy(args, config, basepath, workspace):
     """Entry point for the devtool 'deploy' subcommand"""
-    import math
-    import oe.recipeutils
-    import oe.package
+    import oe.utils
 
     check_workspace_recipe(workspace, args.recipename, checksrc=False)
 
+    tinfoil = setup_tinfoil(basepath=basepath)
+    try:
+        try:
+            rd = tinfoil.parse_recipe(args.recipename)
+        except Exception as e:
+            raise DevtoolError('Exception parsing recipe %s: %s' %
+                               (args.recipename, e))
+
+        srcdir = rd.getVar('D')
+        workdir = rd.getVar('WORKDIR')
+        path = rd.getVar('PATH')
+        strip_cmd = rd.getVar('STRIP')
+        libdir = rd.getVar('libdir')
+        base_libdir = rd.getVar('base_libdir')
+        max_process = oe.utils.get_bb_number_threads(rd)
+        fakerootcmd = rd.getVar('FAKEROOTCMD')
+        fakerootenv = rd.getVar('FAKEROOTENV')
+    finally:
+        tinfoil.shutdown()
+
+    return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
+
+def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
+    import math
+    import oe.package
+
     try:
         host, destdir = args.target.split(':')
     except ValueError:
@@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace):
     if not destdir.endswith('/'):
         destdir += '/'
 
-    tinfoil = setup_tinfoil(basepath=basepath)
-    try:
-        try:
-            rd = tinfoil.parse_recipe(args.recipename)
-        except Exception as e:
-            raise DevtoolError('Exception parsing recipe %s: %s' %
-                               (args.recipename, e))
-        recipe_outdir = rd.getVar('D')
-        if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
-            raise DevtoolError('No files to deploy - have you built the %s '
-                               'recipe? If so, the install step has not installed '
-                               'any files.' % args.recipename)
-
-        if args.strip and not args.dry_run:
-            # Fakeroot copy to new destination
-            srcdir = recipe_outdir
-            recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped')
-            if os.path.isdir(recipe_outdir):
-                bb.utils.remove(recipe_outdir, True)
-            exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
-            os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
-            oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
-                                   rd.getVar('base_libdir'), rd)
-
-        filelist = []
-        inodes = set({})
-        ftotalsize = 0
-        for root, _, files in os.walk(recipe_outdir):
-            for fn in files:
-                fstat = os.lstat(os.path.join(root, fn))
-                # Get the size in kiB (since we'll be comparing it to the output of du -k)
-                # MUST use lstat() here not stat() or getfilesize() since we don't want to
-                # dereference symlinks
-                if fstat.st_ino in inodes:
-                    fsize = 0
-                else:
-                    fsize = int(math.ceil(float(fstat.st_size)/1024))
-                inodes.add(fstat.st_ino)
-                ftotalsize += fsize
-                # The path as it would appear on the target
-                fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
-                filelist.append((fpath, fsize))
-
-        if args.dry_run:
-            print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
-            for item, _ in filelist:
-                print('  %s' % item)
-            return 0
-
-        extraoptions = ''
-        if args.no_host_check:
-            extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
-        if not args.show_status:
-            extraoptions += ' -q'
-
-        scp_sshexec = ''
-        ssh_sshexec = 'ssh'
-        if args.ssh_exec:
-            scp_sshexec = "-S %s" % args.ssh_exec
-            ssh_sshexec = args.ssh_exec
-        scp_port = ''
-        ssh_port = ''
-        if args.port:
-            scp_port = "-P %s" % args.port
-            ssh_port = "-p %s" % args.port
-
-        if args.key:
-            extraoptions += ' -i %s' % args.key
-
-        # In order to delete previously deployed files and have the manifest file on
-        # the target, we write out a shell script and then copy it to the target
-        # so we can then run it (piping tar output to it).
-        # (We cannot use scp here, because it doesn't preserve symlinks.)
-        tmpdir = tempfile.mkdtemp(prefix='devtool')
-        try:
-            tmpscript = '/tmp/devtool_deploy.sh'
-            tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
-            shellscript = _prepare_remote_script(deploy=True,
-                                                 verbose=args.show_status,
-                                                 nopreserve=args.no_preserve,
-                                                 nocheckspace=args.no_check_space)
-            # Write out the script to a file
-            with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
-                f.write(shellscript)
-            # Write out the file list
-            with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
-                f.write('%d\n' % ftotalsize)
-                for fpath, fsize in filelist:
-                    f.write('%s %d\n' % (fpath, fsize))
-            # Copy them to the target
-            ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
-            if ret != 0:
-                raise DevtoolError('Failed to copy script to %s - rerun with -s to '
-                                   'get a complete error message' % args.target)
-        finally:
-            shutil.rmtree(tmpdir)
-
-        # Now run the script
-        ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
-        if ret != 0:
-            raise DevtoolError('Deploy failed - rerun with -s to get a complete '
-                               'error message')
-
-        logger.info('Successfully deployed %s' % recipe_outdir)
-
-        files_list = []
-        for root, _, files in os.walk(recipe_outdir):
-            for filename in files:
-                filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
-                files_list.append(os.path.join(destdir, filename))
-    finally:
-        tinfoil.shutdown()
+    recipe_outdir = srcdir
+    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
+        raise DevtoolError('No files to deploy - have you built the %s '
+                           'recipe? If so, the install step has not installed '
+                           'any files.' % args.recipename)
+
+    if args.strip and not args.dry_run:
+        # Fakeroot copy to new destination
+        srcdir = recipe_outdir
+        recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
+        if os.path.isdir(recipe_outdir):
+            exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
+        exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
+        os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
+        oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)
+
+    filelist = []
+    inodes = set({})
+    ftotalsize = 0
+    for root, _, files in os.walk(recipe_outdir):
+        for fn in files:
+            fstat = os.lstat(os.path.join(root, fn))
+            # Get the size in kiB (since we'll be comparing it to the output of du -k)
+            # MUST use lstat() here not stat() or getfilesize() since we don't want to
+            # dereference symlinks
+            if fstat.st_ino in inodes:
+                fsize = 0
+            else:
+                fsize = int(math.ceil(float(fstat.st_size)/1024))
+            inodes.add(fstat.st_ino)
+            ftotalsize += fsize
+            # The path as it would appear on the target
+            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
+            filelist.append((fpath, fsize))
+
+    if args.dry_run:
+        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
+        for item, _ in filelist:
+            print('  %s' % item)
+        return 0
+
+    extraoptions = ''
+    if args.no_host_check:
+        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+    if not args.show_status:
+        extraoptions += ' -q'
+
+    scp_sshexec = ''
+    ssh_sshexec = 'ssh'
+    if args.ssh_exec:
+        scp_sshexec = "-S %s" % args.ssh_exec
+        ssh_sshexec = args.ssh_exec
+    scp_port = ''
+    ssh_port = ''
+    if args.port:
+        scp_port = "-P %s" % args.port
+        ssh_port = "-p %s" % args.port
+
+    if args.key:
+        extraoptions += ' -i %s' % args.key
+
+    # In order to delete previously deployed files and have the manifest file on
+    # the target, we write out a shell script and then copy it to the target
+    # so we can then run it (piping tar output to it).
+    # (We cannot use scp here, because it doesn't preserve symlinks.)
+    tmpdir = tempfile.mkdtemp(prefix='devtool')
+    try:
+        tmpscript = '/tmp/devtool_deploy.sh'
+        tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
+        shellscript = _prepare_remote_script(deploy=True,
+                                             verbose=args.show_status,
+                                             nopreserve=args.no_preserve,
+                                             nocheckspace=args.no_check_space)
+        # Write out the script to a file
+        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
+            f.write(shellscript)
+        # Write out the file list
+        with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
+            f.write('%d\n' % ftotalsize)
+            for fpath, fsize in filelist:
+                f.write('%s %d\n' % (fpath, fsize))
+        # Copy them to the target
+        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+        if ret != 0:
+            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
+                               'get a complete error message' % args.target)
+    finally:
+        shutil.rmtree(tmpdir)
+
+    # Now run the script
+    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+    if ret != 0:
+        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
+                           'error message')
+
+    logger.info('Successfully deployed %s' % recipe_outdir)
+
+    files_list = []
+    for root, _, files in os.walk(recipe_outdir):
+        for filename in files:
+            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
+            files_list.append(os.path.join(destdir, filename))
 
     return 0
 
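The refactor follows a parse-once pattern: everything deploy needs from the datastore is read inside one short tinfoil session, and the slow ssh/scp work then runs on plain strings. A condensed sketch of the idea (names follow the patch):

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = tinfoil.parse_recipe(args.recipename)
        vals = {v: rd.getVar(v) for v in ('D', 'WORKDIR', 'PATH', 'STRIP', 'libdir',
                                          'base_libdir', 'FAKEROOTCMD', 'FAKEROOTENV')}
    finally:
        tinfoil.shutdown()            # bitbake server released before any network I/O
    # deploy_no_d() can now also be reused by callers without a live datastore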
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
new file mode 100644
index 0000000000..19c2f61c5f
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/__init__.py
@@ -0,0 +1,282 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
+
+import errno
+import json
+import logging
+import os
+import stat
+from enum import Enum, auto
+from devtool import DevtoolError
+from bb.utils import mkdirhier
+
+logger = logging.getLogger('devtool')
+
+
+class BuildTool(Enum):
+    UNDEFINED = auto()
+    CMAKE = auto()
+    MESON = auto()
+
+    @property
+    def is_c_ccp(self):
+        if self is BuildTool.CMAKE:
+            return True
+        if self is BuildTool.MESON:
+            return True
+        return False
+
+
+class GdbCrossConfig:
+    """Base class defining the GDB configuration generator interface
+
+    Generate a GDB configuration for a binary on the target device.
+    Only one instance per binary is allowed. This allows to assign unique port
+    numbers for all gdbserver instances.
+    """
+    _gdbserver_port_next = 1234
+    _binaries = []
+
+    def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
+        self.image_recipe = image_recipe
+        self.modified_recipe = modified_recipe
+        self.gdb_cross = modified_recipe.gdb_cross
+        self.binary = binary
+        if binary in GdbCrossConfig._binaries:
+            raise DevtoolError(
+                "gdbserver config for binary %s is already generated" % binary)
+        GdbCrossConfig._binaries.append(binary)
+        self.script_dir = modified_recipe.ide_sdk_scripts_dir
+        self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
+        self.gdbserver_multi = gdbserver_multi
+        self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
+        self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
+        GdbCrossConfig._gdbserver_port_next += 1
+        self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
+        # gdbserver start script
+        gdbserver_script_file = 'gdbserver_' + self.id_pretty
+        if self.gdbserver_multi:
+            gdbserver_script_file += "_m"
+        self.gdbserver_script = os.path.join(
+            self.script_dir, gdbserver_script_file)
+        # gdbinit file
+        self.gdbinit = os.path.join(
+            self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
+        # gdb start script
+        self.gdb_script = os.path.join(
+            self.script_dir, 'gdb_' + self.id_pretty)
+
+    def _gen_gdbserver_start_script(self):
+        """Generate a shell command starting the gdbserver on the remote device via ssh
+
+        GDB supports two modes:
+        multi: gdbserver remains running over several debug sessions
+        once: gdbserver terminates after the debugged process terminates
+        """
+        cmd_lines = ['#!/bin/sh']
+        if self.gdbserver_multi:
+            temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
+            gdbserver_cmd_start = temp_dir
+            gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
+            gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
+            gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
+                self.gdb_cross.gdbserver_path, self.gdbserver_port)
+            gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
+
+            gdbserver_cmd_stop = temp_dir
+            gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
+            gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
+
+            gdbserver_cmd_l = []
+            gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
+            gdbserver_cmd_l.append('   shift')
+            gdbserver_cmd_l.append("   %s %s %s %s 'sh -c \"%s\"'" % (
+                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
+            gdbserver_cmd_l.append('else')
+            gdbserver_cmd_l.append("   %s %s %s %s 'sh -c \"%s\"'" % (
+                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
+            gdbserver_cmd_l.append('fi')
+            gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
+        else:
+            gdbserver_cmd_start = "%s --once :%s %s" % (
+                self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
+            gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
+                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
+        cmd_lines.append(gdbserver_cmd)
+        GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)
+
+    def _gen_gdbinit_config(self):
+        """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
+        gdbinit_lines = ['# This file is generated by devtool ide-sdk']
+        if self.gdbserver_multi:
+            target_help = '# gdbserver --multi :%d' % self.gdbserver_port
+            remote_cmd = 'target extended-remote'
+        else:
+            target_help = '# gdbserver :%d %s' % (
+                self.gdbserver_port, self.binary)
+            remote_cmd = 'target remote'
+        gdbinit_lines.append('# On the remote target:')
+        gdbinit_lines.append(target_help)
+        gdbinit_lines.append('# On the build machine:')
+        gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
+        gdbinit_lines.append(
+            '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
+
+        gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
+        gdbinit_lines.append('set substitute-path "/usr/include" "' +
+                             os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
+        # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
+        gdbinit_lines.append('set debuginfod enabled off')
+        if self.image_recipe.rootfs_dbg:
+            gdbinit_lines.append(
+                'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
+            # First: Search for sources of this recipe in the workspace folder
+            if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
+                gdbinit_lines.append('set substitute-path "%s" "%s"' %
+                                     (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
+            else:
+                logger.error(
+                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+            # Second: Search for sources of other recipes in the rootfs-dbg
+            if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+                gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
+                    self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
+            else:
+                logger.error(
+                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+        else:
+            logger.warning(
+                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+        gdbinit_lines.append(
+            '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
+        gdbinit_lines.append('set remote exec-file ' + self.binary)
+        gdbinit_lines.append(
+            'run ' + os.path.join(self.modified_recipe.d, self.binary))
+
+        GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)
+
+    def _gen_gdb_start_script(self):
+        """Generate a script starting GDB with the corresponding gdbinit configuration."""
+        cmd_lines = ['#!/bin/sh']
+        cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
+        cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
+                         self.gdbinit + ' "$@"')
+        GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)
+
+    def initialize(self):
+        self._gen_gdbserver_start_script()
+        self._gen_gdbinit_config()
+        self._gen_gdb_start_script()
+
+    @staticmethod
+    def write_file(script_file, cmd_lines, executable=False):
+        script_dir = os.path.dirname(script_file)
+        mkdirhier(script_dir)
+        with open(script_file, 'w') as script_f:
+            script_f.write(os.linesep.join(cmd_lines))
+            script_f.write(os.linesep)
+        if executable:
+            st = os.stat(script_file)
+            os.chmod(script_file, st.st_mode | stat.S_IEXEC)
+        logger.info("Created: %s" % script_file)
+
+
+class IdeBase:
+    """Base class defining the interface for IDE plugins"""
+
+    def __init__(self):
+        self.ide_name = 'undefined'
+        self.gdb_cross_configs = []
+
+    @classmethod
+    def ide_plugin_priority(cls):
+        """Used to find the default ide handler if --ide is not passed"""
+        return 10
+
+    def setup_shared_sysroots(self, shared_env):
+        logger.warn("Shared sysroot mode is not supported for IDE %s" %
+                    self.ide_name)
+
+    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+        logger.warn("Modified recipe mode is not supported for IDE %s" %
+                    self.ide_name)
+
+    def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
+        binaries = modified_recipe.find_installed_binaries()
+        for binary in binaries:
+            gdb_cross_config = gdb_cross_config_class(
+                image_recipe, modified_recipe, binary)
+            gdb_cross_config.initialize()
+            self.gdb_cross_configs.append(gdb_cross_config)
+
+    @staticmethod
+    def gen_oe_scrtips_sym_link(modified_recipe):
+        # create a sym-link from sources to the scripts directory
+        if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
+            IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
+                                  os.path.join(modified_recipe.real_srctree, 'oe-scripts'))
+
+    @staticmethod
+    def update_json_file(json_dir, json_file, update_dict):
+        """Update a json file
+
+        By default it uses the dict.update function. If this is not suitable
+        the update function might be passed via update_func parameter.
+        """
+        json_path = os.path.join(json_dir, json_file)
+        logger.info("Updating IDE config file: %s (%s)" %
+                    (json_file, json_path))
+        if not os.path.exists(json_dir):
+            os.makedirs(json_dir)
+        try:
+            with open(json_path) as f:
+                orig_dict = json.load(f)
+        except json.decoder.JSONDecodeError:
+            logger.info(
+                "Decoding %s failed. Probably because of comments in the json file" % json_path)
+            orig_dict = {}
+        except FileNotFoundError:
+            orig_dict = {}
+        orig_dict.update(update_dict)
+        with open(json_path, 'w') as f:
+            json.dump(orig_dict, f, indent=4)
+
+    @staticmethod
+    def symlink_force(tgt, dst):
+        try:
+            os.symlink(tgt, dst)
+        except OSError as err:
+            if err.errno == errno.EEXIST:
+                if os.readlink(dst) != tgt:
+                    os.remove(dst)
+                    os.symlink(tgt, dst)
+            else:
+                raise err
+
+
+def get_devtool_deploy_opts(args):
+    """Filter args for devtool deploy-target args"""
+    if not args.target:
+        return None
+    devtool_deploy_opts = [args.target]
+    if args.no_host_check:
+        devtool_deploy_opts += ["-c"]
+    if args.show_status:
+        devtool_deploy_opts += ["-s"]
+    if args.no_preserve:
+        devtool_deploy_opts += ["-p"]
+    if args.no_check_space:
+        devtool_deploy_opts += ["--no-check-space"]
+    if args.ssh_exec:
+        devtool_deploy_opts += ["-e", args.ssh_exec]
+    if args.port:
+        devtool_deploy_opts += ["-P", args.port]
+    if args.key:
+        devtool_deploy_opts += ["-I", args.key]
+    if args.strip is False:
+        devtool_deploy_opts += ["--no-strip"]
+    return devtool_deploy_opts
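One detail worth noting in GdbCrossConfig: gdbserver ports come from a class-level counter, so every binary registered in a session gets a unique, predictable port. A toy illustration of the pattern (not the patch's code):

    class PortAlloc:
        _next_port = 1234
        def __init__(self, binary):
            self.binary = binary
            self.port = PortAlloc._next_port     # counter shared across all instances
            PortAlloc._next_port += 1

    a = PortAlloc('/usr/bin/foo')
    b = PortAlloc('/usr/bin/bar')
    print(a.port, b.port)    # 1234 1235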
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
new file mode 100644
index 0000000000..a62b93224e
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -0,0 +1,463 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
7
8import json
9import logging
10import os
11import shutil
12from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
13
14logger = logging.getLogger('devtool')
15
16
17class GdbCrossConfigVSCode(GdbCrossConfig):
18 def __init__(self, image_recipe, modified_recipe, binary):
19 super().__init__(image_recipe, modified_recipe, binary, False)
20
21 def initialize(self):
22 self._gen_gdbserver_start_script()
23
24
25class IdeVSCode(IdeBase):
26 """Manage IDE configurations for VSCode
27
28 Modified recipe mode:
29 - cmake: use the cmake-preset generated by devtool ide-sdk
30 - meson: meson is called via a wrapper script generated by devtool ide-sdk
31
32 Shared sysroot mode:
33 In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
34 A workspace cannot be created because there is no recipe that defines how a workspace could
35 be set up.
36 - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
37 The cmake-kit uses the environment script and the tool-chain file
38 generated by meta-ide-support.
39 - meson: Meson needs manual workspace configuration.
40 """
41
42 @classmethod
43 def ide_plugin_priority(cls):
44 """If --ide is not passed this is the default plugin"""
45 if shutil.which('code'):
46 return 100
47 return 0
48
49 def setup_shared_sysroots(self, shared_env):
50 """Expose the toolchain of the shared sysroots SDK"""
51 datadir = shared_env.ide_support.datadir
52 deploy_dir_image = shared_env.ide_support.deploy_dir_image
53 real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
54 standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
55 vscode_ws_path = os.path.join(
56 os.environ['HOME'], '.local', 'share', 'CMakeTools')
57 cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
58 oecmake_generator = "Ninja"
59 env_script = os.path.join(
60 deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
61
62 if not os.path.isdir(vscode_ws_path):
63 os.makedirs(vscode_ws_path)
64 cmake_kits_old = []
65 if os.path.exists(cmake_kits_path):
66 with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
67 cmake_kits_old = json.load(cmake_kits_file)
68 cmake_kits = cmake_kits_old.copy()
69
70 cmake_kit_new = {
71 "name": "OE " + real_multimach_target_sys,
72 "environmentSetupScript": env_script,
73 "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
74 "preferredGenerator": {
75 "name": oecmake_generator
76 }
77 }
78
79 def merge_kit(cmake_kits, cmake_kit_new):
80 i = 0
81 while i < len(cmake_kits):
82 if 'environmentSetupScript' in cmake_kits[i] and \
83 cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
84 cmake_kits[i] = cmake_kit_new
85 return
86 i += 1
87 cmake_kits.append(cmake_kit_new)
88 merge_kit(cmake_kits, cmake_kit_new)
89
90 if cmake_kits != cmake_kits_old:
91 logger.info("Updating: %s" % cmake_kits_path)
92 with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
93 json.dump(cmake_kits, cmake_kits_file, indent=4)
94 else:
95 logger.info("Already up to date: %s" % cmake_kits_path)
96
97 cmake_native = os.path.join(
98 shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
99 if os.path.isfile(cmake_native):
100 logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
101 else:
102 logger.error("Cannot find cmake native at: %s" % cmake_native)
103
104 def dot_code_dir(self, modified_recipe):
105 return os.path.join(modified_recipe.srctree, '.vscode')
106
107 def __vscode_settings_meson(self, settings_dict, modified_recipe):
108 if modified_recipe.build_tool is not BuildTool.MESON:
109 return
110 settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper
111
112 confopts = modified_recipe.mesonopts.split()
113 confopts += modified_recipe.meson_cross_file.split()
114 confopts += modified_recipe.extra_oemeson.split()
115 settings_dict["mesonbuild.configureOptions"] = confopts
116 settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
117
118 def __vscode_settings_cmake(self, settings_dict, modified_recipe):
119 """Add cmake specific settings to settings.json.
120
121 Note: most settings are passed to the cmake preset.
122 """
123 if modified_recipe.build_tool is not BuildTool.CMAKE:
124 return
125 settings_dict["cmake.configureOnOpen"] = True
126 settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree
127
128 def vscode_settings(self, modified_recipe, image_recipe):
129 files_excludes = {
130 "**/.git/**": True,
131 "**/oe-logs/**": True,
132 "**/oe-workdir/**": True,
133 "**/source-date-epoch/**": True
134 }
135 python_exclude = [
136 "**/.git/**",
137 "**/oe-logs/**",
138 "**/oe-workdir/**",
139 "**/source-date-epoch/**"
140 ]
141 files_readonly = {
142 modified_recipe.recipe_sysroot + '/**': True,
143 modified_recipe.recipe_sysroot_native + '/**': True,
144 }
145 if image_recipe.rootfs_dbg is not None:
146 files_readonly[image_recipe.rootfs_dbg + '/**'] = True
147 settings_dict = {
148 "files.watcherExclude": files_excludes,
149 "files.exclude": files_excludes,
150 "files.readonlyInclude": files_readonly,
151 "python.analysis.exclude": python_exclude
152 }
153 self.__vscode_settings_cmake(settings_dict, modified_recipe)
154 self.__vscode_settings_meson(settings_dict, modified_recipe)
155
156 settings_file = 'settings.json'
157 IdeBase.update_json_file(
158 self.dot_code_dir(modified_recipe), settings_file, settings_dict)
159
160 def __vscode_extensions_cmake(self, modified_recipe, recommendations):
161 if modified_recipe.build_tool is not BuildTool.CMAKE:
162 return
163 recommendations += [
164 "twxs.cmake",
165 "ms-vscode.cmake-tools",
166 "ms-vscode.cpptools",
167 "ms-vscode.cpptools-extension-pack",
168 "ms-vscode.cpptools-themes"
169 ]
170
171 def __vscode_extensions_meson(self, modified_recipe, recommendations):
172 if modified_recipe.build_tool is not BuildTool.MESON:
173 return
174 recommendations += [
175 'mesonbuild.mesonbuild',
176 "ms-vscode.cpptools",
177 "ms-vscode.cpptools-extension-pack",
178 "ms-vscode.cpptools-themes"
179 ]
180
181 def vscode_extensions(self, modified_recipe):
182 recommendations = []
183 self.__vscode_extensions_cmake(modified_recipe, recommendations)
184 self.__vscode_extensions_meson(modified_recipe, recommendations)
185 extensions_file = 'extensions.json'
186 IdeBase.update_json_file(
187 self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})
188
189 def vscode_c_cpp_properties(self, modified_recipe):
190 properties_dict = {
191 "name": modified_recipe.recipe_id_pretty,
192 }
193 if modified_recipe.build_tool is BuildTool.CMAKE:
194 properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
195 elif modified_recipe.build_tool is BuildTool.MESON:
196 properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
197 properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
198 else: # no C/C++ build
199 return
200
201 properties_dicts = {
202 "configurations": [
203 properties_dict
204 ],
205 "version": 4
206 }
207 prop_file = 'c_cpp_properties.json'
208 IdeBase.update_json_file(
209 self.dot_code_dir(modified_recipe), prop_file, properties_dicts)
210
211 def vscode_launch_bin_dbg(self, gdb_cross_config):
212 modified_recipe = gdb_cross_config.modified_recipe
213
214 launch_config = {
215 "name": gdb_cross_config.id_pretty,
216 "type": "cppdbg",
217 "request": "launch",
218 "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
219 "stopAtEntry": True,
220 "cwd": "${workspaceFolder}",
221 "environment": [],
222 "externalConsole": False,
223 "MIMode": "gdb",
224 "preLaunchTask": gdb_cross_config.id_pretty,
225 "miDebuggerPath": modified_recipe.gdb_cross.gdb,
226 "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
227 }
228
229 # Search for header files in recipe-sysroot.
230 src_file_map = {
231 "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
232 }
233 # First, search for the unstripped binaries in the image folder.
234 # These binaries are copied (and optionally stripped) by devtool deploy-target.
235 setup_commands = [
236 {
237 "description": "sysroot",
238 "text": "set sysroot " + modified_recipe.d
239 }
240 ]
241
242 if gdb_cross_config.image_recipe.rootfs_dbg:
243 launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
244 gdb_cross_config.image_recipe)
245 # First: Search for sources of this recipe in the workspace folder
246 if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
247 src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
248 else:
249 logger.error(
250 "TARGET_DBGSRC_DIR must contain the recipe name PN.")
251 # Second: Search for sources of other recipes in the rootfs-dbg
252 if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
253 src_file_map["/usr/src/debug"] = os.path.join(
254 gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
255 else:
256 logger.error(
257 "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
258 else:
259 logger.warning(
260 "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
261
262 launch_config['sourceFileMap'] = src_file_map
263 launch_config['setupCommands'] = setup_commands
264 return launch_config
265
266 def vscode_launch(self, modified_recipe):
267 """GDB Launch configuration for binaries (elf files)"""
268
269 configurations = []
270 for gdb_cross_config in self.gdb_cross_configs:
271 if gdb_cross_config.modified_recipe is modified_recipe:
272 configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
273 launch_dict = {
274 "version": "0.2.0",
275 "configurations": configurations
276 }
277 launch_file = 'launch.json'
278 IdeBase.update_json_file(
279 self.dot_code_dir(modified_recipe), launch_file, launch_dict)
280
281 def vscode_tasks_cpp(self, args, modified_recipe):
282 run_install_deploy = modified_recipe.gen_install_deploy_script(args)
283 install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
284 tasks_dict = {
285 "version": "2.0.0",
286 "tasks": [
287 {
288 "label": install_task_name,
289 "type": "shell",
290 "command": run_install_deploy,
291 "problemMatcher": []
292 }
293 ]
294 }
295 for gdb_cross_config in self.gdb_cross_configs:
296 if gdb_cross_config.modified_recipe is not modified_recipe:
297 continue
298 tasks_dict['tasks'].append(
299 {
300 "label": gdb_cross_config.id_pretty,
301 "type": "shell",
302 "isBackground": True,
303 "dependsOn": [
304 install_task_name
305 ],
306 "command": gdb_cross_config.gdbserver_script,
307 "problemMatcher": [
308 {
309 "pattern": [
310 {
311 "regexp": ".",
312 "file": 1,
313 "location": 2,
314 "message": 3
315 }
316 ],
317 "background": {
318 "activeOnStart": True,
319 "beginsPattern": ".",
320 "endsPattern": ".",
321 }
322 }
323 ]
324 })
325 tasks_file = 'tasks.json'
326 IdeBase.update_json_file(
327 self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
328
329 def vscode_tasks_fallback(self, args, modified_recipe):
330 oe_init_dir = modified_recipe.oe_init_dir
331 oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
332 dt_build = "devtool build "
333 dt_build_label = dt_build + modified_recipe.recipe_id_pretty
334 dt_build_cmd = dt_build + modified_recipe.bpn
335 clean_opt = " --clean"
336 dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
337 dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
338 dt_deploy = "devtool deploy-target "
339 dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
340 dt_deploy_cmd = dt_deploy + modified_recipe.bpn
341 dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
342 deploy_opts = ' '.join(get_devtool_deploy_opts(args))
343 tasks_dict = {
344 "version": "2.0.0",
345 "tasks": [
346 {
347 "label": dt_build_label,
348 "type": "shell",
349 "command": "bash",
350 "linux": {
351 "options": {
352 "cwd": oe_init_dir
353 }
354 },
355 "args": [
356 "--login",
357 "-c",
358 "%s%s" % (oe_init, dt_build_cmd)
359 ],
360 "problemMatcher": []
361 },
362 {
363 "label": dt_deploy_label,
364 "type": "shell",
365 "command": "bash",
366 "linux": {
367 "options": {
368 "cwd": oe_init_dir
369 }
370 },
371 "args": [
372 "--login",
373 "-c",
374 "%s%s %s" % (
375 oe_init, dt_deploy_cmd, deploy_opts)
376 ],
377 "problemMatcher": []
378 },
379 {
380 "label": dt_build_deploy_label,
381 "dependsOrder": "sequence",
382 "dependsOn": [
383 dt_build_label,
384 dt_deploy_label
385 ],
386 "problemMatcher": [],
387 "group": {
388 "kind": "build",
389 "isDefault": True
390 }
391 },
392 {
393 "label": dt_build_clean_label,
394 "type": "shell",
395 "command": "bash",
396 "linux": {
397 "options": {
398 "cwd": oe_init_dir
399 }
400 },
401 "args": [
402 "--login",
403 "-c",
404 "%s%s" % (oe_init, dt_build_clean_cmd)
405 ],
406 "problemMatcher": []
407 }
408 ]
409 }
410 if modified_recipe.gdb_cross:
411 for gdb_cross_config in self.gdb_cross_configs:
412 if gdb_cross_config.modified_recipe is not modified_recipe:
413 continue
414 tasks_dict['tasks'].append(
415 {
416 "label": gdb_cross_config.id_pretty,
417 "type": "shell",
418 "isBackground": True,
419 "dependsOn": [
420 dt_build_deploy_label
421 ],
422 "command": gdb_cross_config.gdbserver_script,
423 "problemMatcher": [
424 {
425 "pattern": [
426 {
427 "regexp": ".",
428 "file": 1,
429 "location": 2,
430 "message": 3
431 }
432 ],
433 "background": {
434 "activeOnStart": True,
435 "beginsPattern": ".",
436 "endsPattern": ".",
437 }
438 }
439 ]
440 })
441 tasks_file = 'tasks.json'
442 IdeBase.update_json_file(
443 self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
444
445 def vscode_tasks(self, args, modified_recipe):
446 if modified_recipe.build_tool.is_c_ccp:
447 self.vscode_tasks_cpp(args, modified_recipe)
448 else:
449 self.vscode_tasks_fallback(args, modified_recipe)
450
451 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
452 self.vscode_settings(modified_recipe, image_recipe)
453 self.vscode_extensions(modified_recipe)
454 self.vscode_c_cpp_properties(modified_recipe)
455 if args.target:
456 self.initialize_gdb_cross_configs(
457 image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
458 self.vscode_launch(modified_recipe)
459 self.vscode_tasks(args, modified_recipe)
460
461
462def register_ide_plugin(ide_plugins):
463 ide_plugins['code'] = IdeVSCode
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
new file mode 100644
index 0000000000..f106c5a026
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_none.py
@@ -0,0 +1,53 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk generic IDE plugin"""
7
8import os
9import logging
10from devtool.ide_plugins import IdeBase, GdbCrossConfig
11
12logger = logging.getLogger('devtool')
13
14
15class IdeNone(IdeBase):
16 """Generate some generic helpers for other IDEs
17
18 Modified recipe mode:
19 Generate some helper scripts for remote debugging with GDB
20
21 Shared sysroot mode:
22 A wrapper for bitbake meta-ide-support and bitbake build-sysroots
23 """
24
25 def __init__(self):
26 super().__init__()
27
28 def setup_shared_sysroots(self, shared_env):
29 real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
30 deploy_dir_image = shared_env.ide_support.deploy_dir_image
31 env_script = os.path.join(
32 deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
33 logger.info(
34 "To use this SDK please source this: %s" % env_script)
35
36 def setup_modified_recipe(self, args, image_recipe, modified_recipe):
37 """generate some helper scripts and config files
38
39 - Execute the do_install task
40 - Execute devtool deploy-target
41 - Generate a gdbinit file per executable
42 - Generate the oe-scripts sym-link
43 """
44 script_path = modified_recipe.gen_install_deploy_script(args)
45 logger.info("Created: %s" % script_path)
46
47 self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
48
49 IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
50
51
52def register_ide_plugin(ide_plugins):
53 ide_plugins['none'] = IdeNone
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
new file mode 100755
index 0000000000..7807b322b3
--- /dev/null
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -0,0 +1,1070 @@
1# Development tool - ide-sdk command plugin
2#
3# Copyright (C) 2023-2024 Siemens AG
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool ide-sdk plugin"""
8
9import json
10import logging
11import os
12import re
13import shutil
14import stat
15import subprocess
16import sys
17from argparse import RawTextHelpFormatter
18from enum import Enum
19
20import scriptutils
21import bb
22from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
23from devtool.standard import get_real_srctree
24from devtool.ide_plugins import BuildTool
25
26
27logger = logging.getLogger('devtool')
28
29# dict of classes derived from IdeBase
30ide_plugins = {}
31
32
33class DevtoolIdeMode(Enum):
34 """Different modes are supported by the ide-sdk plugin.
35
36 The enum might be extended by more advanced modes in the future. Some ideas:
37 - auto: modified if all recipes are modified, shared if none of the recipes is modified.
38 - mixed: modified mode for modified recipes, shared mode for all other recipes.
39 """
40
41 modified = 'modified'
42 shared = 'shared'
43
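Since each member's value equals its name, the class itself can serve as the argparse type callable for --mode; a quick sketch (illustrative only):

    # argparse calls DevtoolIdeMode('<raw string>') on the --mode argument:
    assert DevtoolIdeMode('modified') is DevtoolIdeMode.modified
    assert DevtoolIdeMode('shared') is DevtoolIdeMode.shared
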
44
45class TargetDevice:
46 """SSH remote login parameters"""
47
48 def __init__(self, args):
49 self.extraoptions = ''
50 if args.no_host_check:
51 self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
52 self.ssh_sshexec = 'ssh'
53 if args.ssh_exec:
54 self.ssh_sshexec = args.ssh_exec
55 self.ssh_port = ''
56 if args.port:
57 self.ssh_port = "-p %s" % args.port
58 if args.key:
59 self.extraoptions += ' -i %s' % args.key
60
61 self.target = args.target
62 target_sp = args.target.split('@')
63 if len(target_sp) == 1:
64 self.login = ""
65 self.host = target_sp[0]
66 elif len(target_sp) == 2:
67 self.login = target_sp[0]
68 self.host = target_sp[1]
69 else:
70 logger.error("Invalid target argument: %s" % args.target)
71
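For illustration, how the --target argument is split into login and host can be exercised with a stand-in for the argparse namespace (SimpleNamespace and the address are examples only):

    from types import SimpleNamespace

    args = SimpleNamespace(no_host_check=False, ssh_exec=None, port=None,
                           key=None, target='root@192.168.7.2')
    dev = TargetDevice(args)
    assert (dev.login, dev.host) == ('root', '192.168.7.2')
    args.target = '192.168.7.2'     # a bare hostname leaves the login empty
    assert TargetDevice(args).login == ''
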
72
73class RecipeNative:
74 """Base class for calling bitbake to provide a -native recipe"""
75
76 def __init__(self, name, target_arch=None):
77 self.name = name
78 self.target_arch = target_arch
79 self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
80 self.staging_bindir_native = None
81 self.target_sys = None
82 self.__native_bin = None
83
84 def _initialize(self, config, workspace, tinfoil):
85 """Get the parsed recipe"""
86 recipe_d = parse_recipe(
87 config, tinfoil, self.name, appends=True, filter_workspace=False)
88 if not recipe_d:
89 raise DevtoolError("Parsing %s recipe failed" % self.name)
90 self.staging_bindir_native = os.path.realpath(
91 recipe_d.getVar('STAGING_BINDIR_NATIVE'))
92 self.target_sys = recipe_d.getVar('TARGET_SYS')
93 return recipe_d
94
95 def initialize(self, config, workspace, tinfoil):
96 """Basic initialization that can be overridden by a derived class"""
97 self._initialize(config, workspace, tinfoil)
98
99 @property
100 def native_bin(self):
101 if not self.__native_bin:
102 raise DevtoolError("native binary name is not defined.")
103 return self.__native_bin
104
105
106class RecipeGdbCross(RecipeNative):
107 """Handle handle gdb-cross on the host and the gdbserver on the target device"""
108
109 def __init__(self, args, target_arch, target_device):
110 super().__init__('gdb-cross-' + target_arch, target_arch)
111 self.target_device = target_device
112 self.gdb = None
113 self.gdbserver_port_next = int(args.gdbserver_port_start)
114 self.config_db = {}
115
116 def __find_gdbserver(self, config, tinfoil):
117 """Absolute path of the gdbserver"""
118 recipe_d_gdb = parse_recipe(
119 config, tinfoil, 'gdb', appends=True, filter_workspace=False)
120 if not recipe_d_gdb:
121 raise DevtoolError("Parsing gdb recipe failed")
122 return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')
123
124 def initialize(self, config, workspace, tinfoil):
125 super()._initialize(config, workspace, tinfoil)
126 gdb_bin = self.target_sys + '-gdb'
127 gdb_path = os.path.join(
128 self.staging_bindir_native, self.target_sys, gdb_bin)
129 self.gdb = gdb_path
130 self.gdbserver_path = self.__find_gdbserver(config, tinfoil)
131
132 @property
133 def host(self):
134 return self.target_device.host
135
136
137class RecipeImage:
138 """Handle some image recipe related properties
139
140 Most workflows require firmware that runs on the target device.
141 This firmware must be consistent with the setup of the host system.
142 In particular, the debug symbols must be compatible. For this, the
143 rootfs must be created as part of the SDK.
144 """
145
146 def __init__(self, name):
147 self.combine_dbg_image = False
148 self.gdbserver_missing = False
149 self.name = name
150 self.rootfs = None
151 self.__rootfs_dbg = None
152 self.bootstrap_tasks = [self.name + ':do_build']
153
154 def initialize(self, config, tinfoil):
155 image_d = parse_recipe(
156 config, tinfoil, self.name, appends=True, filter_workspace=False)
157 if not image_d:
158 raise DevtoolError(
159 "Parsing image recipe %s failed" % self.name)
160
161 self.combine_dbg_image = bb.data.inherits_class(
162 'image-combined-dbg', image_d)
163
164 workdir = image_d.getVar('WORKDIR')
165 self.rootfs = os.path.join(workdir, 'rootfs')
166 if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
167 self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
168
169 self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
170 'IMAGE_INSTALL')
171
172 @property
173 def debug_support(self):
174 return bool(self.rootfs_dbg)
175
176 @property
177 def rootfs_dbg(self):
178 if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
179 return self.__rootfs_dbg
180 return None
181
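The two properties make the dependency on IMAGE_GEN_DEBUGFS explicit; a sketch of how a caller might react (recipe name and flow are illustrative, initialize() and the bitbake run are elided):

    image = RecipeImage('core-image-minimal')
    # ... image.initialize(config, tinfoil) and a bitbake run ...
    if not image.debug_support:
        logger.warning('IMAGE_GEN_DEBUGFS = "1" is required to debug with symbols')
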
182
183class RecipeMetaIdeSupport:
184 """For the shared sysroots mode meta-ide-support is needed
185
186 For use cases where just a cross tool-chain is required but
187 no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
188 and bitbake build-sysroots. This also makes it possible to expose the
189 cross-toolchains to IDEs. For example, VSCode supports different tool-chains via cmake-kits.
190 """
191
192 def __init__(self):
193 self.bootstrap_tasks = ['meta-ide-support:do_build']
194 self.topdir = None
195 self.datadir = None
196 self.deploy_dir_image = None
197 self.build_sys = None
198 # From toolchain-scripts
199 self.real_multimach_target_sys = None
200
201 def initialize(self, config, tinfoil):
202 meta_ide_support_d = parse_recipe(
203 config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
204 if not meta_ide_support_d:
205 raise DevtoolError("Parsing meta-ide-support recipe failed")
206
207 self.topdir = meta_ide_support_d.getVar('TOPDIR')
208 self.datadir = meta_ide_support_d.getVar('datadir')
209 self.deploy_dir_image = meta_ide_support_d.getVar(
210 'DEPLOY_DIR_IMAGE')
211 self.build_sys = meta_ide_support_d.getVar('BUILD_SYS')
212 self.real_multimach_target_sys = meta_ide_support_d.getVar(
213 'REAL_MULTIMACH_TARGET_SYS')
214
215
216class RecipeBuildSysroots:
217 """For the shared sysroots mode build-sysroots is needed"""
218
219 def __init__(self):
220 self.standalone_sysroot = None
221 self.standalone_sysroot_native = None
222 self.bootstrap_tasks = [
223 'build-sysroots:do_build_target_sysroot',
224 'build-sysroots:do_build_native_sysroot'
225 ]
226
227 def initialize(self, config, tinfoil):
228 build_sysroots_d = parse_recipe(
229 config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
230 if not build_sysroots_d:
231 raise DevtoolError("Parsing build-sysroots recipe failed")
232 self.standalone_sysroot = build_sysroots_d.getVar(
233 'STANDALONE_SYSROOT')
234 self.standalone_sysroot_native = build_sysroots_d.getVar(
235 'STANDALONE_SYSROOT_NATIVE')
236
237
238class SharedSysrootsEnv:
239 """Handle the shared sysroots based workflow
240
241 Support the workflow with just a tool-chain without a recipe.
242 It's basically like:
243 bitbake some-dependencies
244 bitbake meta-ide-support
245 bitbake build-sysroots
246 Use the environment-* file found in the deploy folder
247 """
248
249 def __init__(self):
250 self.ide_support = None
251 self.build_sysroots = None
252
253 def initialize(self, ide_support, build_sysroots):
254 self.ide_support = ide_support
255 self.build_sysroots = build_sysroots
256
257 def setup_ide(self, ide):
258 ide.setup(self)
259
260
261class RecipeNotModified:
262 """Handling of recipes added to the Direct DSK shared sysroots."""
263
264 def __init__(self, name):
265 self.name = name
266 self.bootstrap_tasks = [name + ':do_populate_sysroot']
267
268
269class RecipeModified:
270 """Handling of recipes in the workspace created by devtool modify"""
271 OE_INIT_BUILD_ENV = 'oe-init-build-env'
272
273 VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
274
275 def __init__(self, name):
276 self.name = name
277 self.bootstrap_tasks = [name + ':do_install']
278 self.gdb_cross = None
279 # workspace
280 self.real_srctree = None
281 self.srctree = None
282 self.ide_sdk_dir = None
283 self.ide_sdk_scripts_dir = None
284 self.bbappend = None
285 # recipe variables from d.getVar
286 self.b = None
287 self.base_libdir = None
288 self.bblayers = None
289 self.bpn = None
290 self.d = None
291 self.fakerootcmd = None
292 self.fakerootenv = None
293 self.libdir = None
294 self.max_process = None
295 self.package_arch = None
296 self.package_debug_split_style = None
297 self.path = None
298 self.pn = None
299 self.recipe_sysroot = None
300 self.recipe_sysroot_native = None
301 self.staging_incdir = None
302 self.strip_cmd = None
303 self.target_arch = None
304 self.target_dbgsrc_dir = None
305 self.topdir = None
306 self.workdir = None
307 self.recipe_id = None
308 # replicate bitbake build environment
309 self.exported_vars = None
310 self.cmd_compile = None
311 self.__oe_init_dir = None
312 # main build tool used by this recipe
313 self.build_tool = BuildTool.UNDEFINED
314 # build_tool = cmake
315 self.oecmake_generator = None
316 self.cmake_cache_vars = None
317 # build_tool = meson
318 self.meson_buildtype = None
319 self.meson_wrapper = None
320 self.mesonopts = None
321 self.extra_oemeson = None
322 self.meson_cross_file = None
323
324 def initialize(self, config, workspace, tinfoil):
325 recipe_d = parse_recipe(
326 config, tinfoil, self.name, appends=True, filter_workspace=False)
327 if not recipe_d:
328 raise DevtoolError("Parsing %s recipe failed" % self.name)
329
330 # Verify this recipe is built as externalsrc setup by devtool modify
331 workspacepn = check_workspace_recipe(
332 workspace, self.name, bbclassextend=True)
333 self.srctree = workspace[workspacepn]['srctree']
334 # Need to grab this here in case the source is within a subdirectory
335 self.real_srctree = get_real_srctree(
336 self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
337 self.bbappend = workspace[workspacepn]['bbappend']
338
339 self.ide_sdk_dir = os.path.join(
340 config.workspace_path, 'ide-sdk', self.name)
341 if os.path.exists(self.ide_sdk_dir):
342 shutil.rmtree(self.ide_sdk_dir)
343 self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')
344
345 self.b = recipe_d.getVar('B')
346 self.base_libdir = recipe_d.getVar('base_libdir')
347 self.bblayers = recipe_d.getVar('BBLAYERS').split()
348 self.bpn = recipe_d.getVar('BPN')
349 self.cxx = recipe_d.getVar('CXX')
350 self.d = recipe_d.getVar('D')
351 self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
352 self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
353 self.libdir = recipe_d.getVar('libdir')
354 self.max_process = int(recipe_d.getVar(
355 "BB_NUMBER_THREADS") or os.cpu_count() or 1)
356 self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
357 self.package_debug_split_style = recipe_d.getVar(
358 'PACKAGE_DEBUG_SPLIT_STYLE')
359 self.path = recipe_d.getVar('PATH')
360 self.pn = recipe_d.getVar('PN')
361 self.recipe_sysroot = os.path.realpath(
362 recipe_d.getVar('RECIPE_SYSROOT'))
363 self.recipe_sysroot_native = os.path.realpath(
364 recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
365 self.staging_bindir_toolchain = os.path.realpath(
366 recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
367 self.staging_incdir = os.path.realpath(
368 recipe_d.getVar('STAGING_INCDIR'))
369 self.strip_cmd = recipe_d.getVar('STRIP')
370 self.target_arch = recipe_d.getVar('TARGET_ARCH')
371 self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
372 self.topdir = recipe_d.getVar('TOPDIR')
373 self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))
374
375 self.__init_exported_variables(recipe_d)
376
377 if bb.data.inherits_class('cmake', recipe_d):
378 self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
379 self.__init_cmake_preset_cache(recipe_d)
380 self.build_tool = BuildTool.CMAKE
381 elif bb.data.inherits_class('meson', recipe_d):
382 self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
383 self.mesonopts = recipe_d.getVar('MESONOPTS')
384 self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
385 self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
386 self.build_tool = BuildTool.MESON
387
388 # Recipe ID is the identifier for IDE config sections
389 self.recipe_id = self.bpn + "-" + self.package_arch
390 self.recipe_id_pretty = self.bpn + ": " + self.package_arch
391
392 def append_to_bbappend(self, append_text):
393 with open(self.bbappend, 'a') as bbap:
394 bbap.write(append_text)
395
396 def remove_from_bbappend(self, append_text):
397 with open(self.bbappend, 'r') as bbap:
398 text = bbap.read()
399 new_text = text.replace(append_text, '')
400 with open(self.bbappend, 'w') as bbap:
401 bbap.write(new_text)
402
403 @staticmethod
404 def is_valid_shell_variable(var):
405 """Skip strange shell variables like systemd
406
407 prevent from strange bugs because of strange variables which
408 are not used in this context but break various tools.
409 """
410 if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
411 bb.debug(1, "ignoring variable: %s" % var)
412 return True
413 return False
414
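Two illustrative checks against the regex above:

    # Plain build variables pass; names produced by e.g. exported bash
    # functions contain characters like '%' and are filtered out.
    assert RecipeModified.is_valid_shell_variable("CFLAGS")
    assert not RecipeModified.is_valid_shell_variable("BASH_FUNC_module%%")
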
415 def debug_build_config(self, args):
416 """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
417 if self.build_tool is BuildTool.CMAKE:
418 append_text = os.linesep + \
419 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
420 if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
421 self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
422 "type": "STRING",
423 "value": "Debug",
424 }
425 self.append_to_bbappend(append_text)
426 elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
427 del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
428 self.remove_from_bbappend(append_text)
429 elif self.build_tool is BuildTool.MESON:
430 append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
431 if args.debug_build_config and self.meson_buildtype != "debug":
432 self.mesonopts = self.mesonopts.replace(
433 '--buildtype ' + self.meson_buildtype, '--buildtype debug')
434 self.append_to_bbappend(append_text)
435 elif self.meson_buildtype == "debug":
436 self.mesonopts = self.mesonopts.replace(
437 '--buildtype debug', '--buildtype plain')
438 self.remove_from_bbappend(append_text)
439 elif args.debug_build_config:
440 logger.warn(
441 "--debug-build-config is not implemented for this build tool yet.")
442
443 def solib_search_path(self, image):
444 """Search for debug symbols in the rootfs and rootfs-dbg
445
446 The debug symbols of shared libraries which are provided by other packages
447 are grabbed from the -dbg packages in the rootfs-dbg.
448
449 But most cross debugging tools like gdb, perf, and systemtap need to find
450 the executable/library first and then locate the corresponding symbols file
451 through its debuglink note. Therefore the library paths from the rootfs are added as well.
452
453 Note: For the devtool modified recipe compiled from the IDE, the debug
454 symbols are taken from the unstripped binaries in the image folder.
455 Also, devtool deploy-target takes the files from the image folder.
456 debug symbols in the image folder refer to the corresponding source files
457 with absolute paths of the build machine. Debug symbols found in the
458 rootfs-dbg are relocated and contain paths which refer to the source files
459 installed on the target device e.g. /usr/src/...
460 """
461 base_libdir = self.base_libdir.lstrip('/')
462 libdir = self.libdir.lstrip('/')
463 so_paths = [
464 # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
465 os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
466 os.path.join(image.rootfs_dbg, libdir, ".debug"),
467 # debug symbols for package_debug_split_style: debug-file-directory
468 os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
469
470 # The binaries are required as well, the debug packages are not enough
471 # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
472 os.path.join(image.rootfs_dbg, base_libdir),
473 os.path.join(image.rootfs_dbg, libdir),
474 # Without image-combined-dbg.bbclass the binaries are only in rootfs.
475 # Note: Stepping into source files located in rootfs-dbg does not
476 # work without image-combined-dbg.bbclass yet.
477 os.path.join(image.rootfs, base_libdir),
478 os.path.join(image.rootfs, libdir)
479 ]
480 return so_paths
481
482 def solib_search_path_str(self, image):
483 """Return a : separated list of paths usable by GDB's set solib-search-path"""
484 return ':'.join(self.solib_search_path(image))
485
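For illustration, assuming base_libdir=/lib, libdir=/usr/lib and a typical WORKDIR layout, the joined string handed to GDB looks roughly like this (paths shortened):

    # set solib-search-path <WORKDIR>/rootfs-dbg/lib/.debug:
    #                       <WORKDIR>/rootfs-dbg/usr/lib/.debug:
    #                       <WORKDIR>/rootfs-dbg/usr/lib/debug:
    #                       <WORKDIR>/rootfs-dbg/lib:<WORKDIR>/rootfs-dbg/usr/lib:
    #                       <WORKDIR>/rootfs/lib:<WORKDIR>/rootfs/usr/lib
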
486 def __init_exported_variables(self, d):
487 """Find all variables with export flag set.
488
489 This makes it possible to generate IDE configurations which compile
490 with the same environment as bitbake does; a reasonable default.
491 """
492 exported_vars = {}
493
494 vars = (key for key in d.keys() if not key.startswith(
495 "__") and not d.getVarFlag(key, "func", False))
496 for var in vars:
497 func = d.getVarFlag(var, "func", False)
498 if d.getVarFlag(var, 'python', False) and func:
499 continue
500 export = d.getVarFlag(var, "export", False)
501 unexport = d.getVarFlag(var, "unexport", False)
502 if not export and not unexport and not func:
503 continue
504 if unexport:
505 continue
506
507 val = d.getVar(var)
508 if val is None:
509 continue
510 if set(var) & set("-.{}+"):
511 logger.warn(
512 "Warning: Found invalid character in variable name %s", str(var))
513 continue
514 varExpanded = d.expand(var)
515 val = str(val)
516
517 if not RecipeModified.is_valid_shell_variable(varExpanded):
518 continue
519
520 if func:
521 code_line = "line: {0}, file: {1}\n".format(
522 d.getVarFlag(var, "lineno", False),
523 d.getVarFlag(var, "filename", False))
524 val = val.rstrip('\n')
525 logger.warn("Warning: exported shell function %s() is not exported (%s)" %
526 (varExpanded, code_line))
527 continue
528
529 if export:
530 exported_vars[varExpanded] = val.strip()
531 continue
532
533 self.exported_vars = exported_vars
534
535 def __init_cmake_preset_cache(self, d):
536 """Get the arguments passed to cmake
537
538 Replicate the cmake configure arguments with all details to
539 share on build folder between bitbake and SDK.
540 """
541 site_file = os.path.join(self.workdir, 'site-file.cmake')
542 if os.path.exists(site_file):
543 print("Warning: site-file.cmake is not supported")
544
545 cache_vars = {}
546 oecmake_args = d.getVar('OECMAKE_ARGS').split()
547 extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
548 for param in oecmake_args + extra_oecmake:
549 d_pref = "-D"
550 if param.startswith(d_pref):
551 param = param[len(d_pref):]
552 else:
553 print("Error: expected a -D")
554 param_s = param.split('=', 1)
555 param_nt = param_s[0].split(':', 1)
556
557 def handle_undefined_variable(var):
558 if var.startswith('${') and var.endswith('}'):
559 return ''
560 else:
561 return var
562 # Example: FOO=ON
563 if len(param_nt) == 1:
564 cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
565 # Example: FOO:PATH=/tmp
566 elif len(param_nt) == 2:
567 cache_vars[param_nt[0]] = {
568 "type": param_nt[1],
569 "value": handle_undefined_variable(param_s[1]),
570 }
571 else:
572 print("Error: cannot parse %s" % param)
573 self.cmake_cache_vars = cache_vars
574
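For illustration, a recipe with EXTRA_OECMAKE = "-DFOO=ON -DBAR:PATH=/tmp" (invented values) ends up with a cache dictionary like:

    # Resulting self.cmake_cache_vars for the invented arguments above:
    cache_vars = {
        "FOO": "ON",                                # untyped entry
        "BAR": {"type": "PATH", "value": "/tmp"},   # typed entry
    }
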
575 def cmake_preset(self):
576 """Create a preset for cmake that mimics how bitbake calls cmake"""
577 toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
578 cmake_executable = os.path.join(
579 self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
580 self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id
581
582 preset_dict_configure = {
583 "name": self.recipe_id,
584 "displayName": self.recipe_id_pretty,
585 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
586 "binaryDir": self.b,
587 "generator": self.oecmake_generator,
588 "toolchainFile": toolchain_file,
589 "cacheVariables": self.cmake_cache_vars,
590 "environment": self.exported_vars,
591 "cmakeExecutable": cmake_executable
592 }
593
594 preset_dict_build = {
595 "name": self.recipe_id,
596 "displayName": self.recipe_id_pretty,
597 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
598 "configurePreset": self.recipe_id,
599 "inheritConfigureEnvironment": True
600 }
601
602 preset_dict_test = {
603 "name": self.recipe_id,
604 "displayName": self.recipe_id_pretty,
605 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
606 "configurePreset": self.recipe_id,
607 "inheritConfigureEnvironment": True
608 }
609
610 preset_dict = {
611 "version": 3, # cmake 3.21, backward compatible with kirkstone
612 "configurePresets": [preset_dict_configure],
613 "buildPresets": [preset_dict_build],
614 "testPresets": [preset_dict_test]
615 }
616
617 # Finally write the json file
618 json_file = 'CMakeUserPresets.json'
619 json_path = os.path.join(self.real_srctree, json_file)
620 logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
621 if not os.path.exists(self.real_srctree):
622 os.makedirs(self.real_srctree)
623 try:
624 with open(json_path) as f:
625 orig_dict = json.load(f)
626 except json.decoder.JSONDecodeError:
627 logger.info(
628 "Decoding %s failed. Probably because of comments in the json file" % json_path)
629 orig_dict = {}
630 except FileNotFoundError:
631 orig_dict = {}
632
633 # Add or update the presets for the recipe and keep other presets
634 for k, v in preset_dict.items():
635 if isinstance(v, list):
636 update_preset = v[0]
637 preset_added = False
638 if k in orig_dict:
639 for index, orig_preset in enumerate(orig_dict[k]):
640 if 'name' in orig_preset:
641 if orig_preset['name'] == update_preset['name']:
642 logger.debug("Updating preset: %s" %
643 orig_preset['name'])
644 orig_dict[k][index] = update_preset
645 preset_added = True
646 break
647 else:
648 logger.debug("keeping preset: %s" %
649 orig_preset['name'])
650 else:
651 logger.warn("preset without a name found")
652 if not preset_added:
653 if not k in orig_dict:
654 orig_dict[k] = []
655 orig_dict[k].append(update_preset)
656 logger.debug("Added preset: %s" %
657 update_preset['name'])
658 else:
659 orig_dict[k] = v
660
661 with open(json_path, 'w') as f:
662 json.dump(orig_dict, f, indent=4)
663
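Once CMakeUserPresets.json is written, the exact build bitbake performs can be reproduced from the source tree; a sketch, assuming the recipe id 'cmake-example-core2-64' and the workspace path below:

    import subprocess

    # Equivalent to cmd_compile above, run from the source tree:
    subprocess.run(['cmake', '--build', '--preset', 'cmake-example-core2-64'],
                   cwd='/path/to/workspace/sources/cmake-example', check=True)
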
664 def gen_meson_wrapper(self):
665 """Generate a wrapper script to call meson with the cross environment"""
666 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
667 meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
668 meson_real = os.path.join(
669 self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
670 with open(meson_wrapper, 'w') as mwrap:
671 mwrap.write("#!/bin/sh" + os.linesep)
672 for var, val in self.exported_vars.items():
673 mwrap.write('export %s="%s"' % (var, val) + os.linesep)
674 mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
675 private_temp = os.path.join(self.b, "meson-private", "tmp")
676 mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
677 mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
678 mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
679 st = os.stat(meson_wrapper)
680 os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
681 self.meson_wrapper = meson_wrapper
682 self.cmd_compile = meson_wrapper + " compile -C " + self.b
683
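The wrapper can then be used exactly like meson, by the IDE or manually; a sketch with illustrative paths:

    import subprocess

    # Equivalent to cmd_compile above; the wrapper injects the exported
    # bitbake environment before delegating to meson.real.
    subprocess.run(['/path/to/workspace/ide-sdk/meson-example/scripts/meson',
                    'compile', '-C', '/path/to/build/dir'], check=True)
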
684 def which(self, executable):
685 bin_path = shutil.which(executable, path=self.path)
686 if not bin_path:
687 raise DevtoolError(
688 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
689 return bin_path
690
691 @staticmethod
692 def is_elf_file(file_path):
693 with open(file_path, "rb") as f:
694 data = f.read(4)
695 if data == b'\x7fELF':
696 return True
697 return False
698
699 def find_installed_binaries(self):
700 """find all executable elf files in the image directory"""
701 binaries = []
702 d_len = len(self.d)
703 re_so = re.compile(r'.*\.so[.0-9]*$')
704 for root, _, files in os.walk(self.d, followlinks=False):
705 for file in files:
706 abs_name = os.path.join(root, file)
707 if os.path.islink(abs_name):
708 continue
709 if re_so.match(file):
710 continue
711 if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
712 binaries.append(abs_name[d_len:])
713 return sorted(binaries)
714
715 def gen_delete_package_dirs(self):
716 """delete folders of package tasks
717
718 This is a workaround for an issue with recipes having their sources
719 downloaded as file://
720 This likely breaks pseudo like:
721 path mismatch [3 links]: ino 79147802 db
722 .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
723 cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
724 .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
725 Since the files are outdated anyway, delete them (also from pseudo's db) to work around this issue.
726 """
727 cmd_lines = ['#!/bin/sh']
728
729 # Set up the appropriate environment
730 newenv = dict(os.environ)
731 for varvalue in self.fakerootenv.split():
732 if '=' in varvalue:
733 splitval = varvalue.split('=', 1)
734 newenv[splitval[0]] = splitval[1]
735
736 # Replicate the environment variables from bitbake
737 for var, val in newenv.items():
738 if not RecipeModified.is_valid_shell_variable(var):
739 continue
740 cmd_lines.append('%s="%s"' % (var, val))
741 cmd_lines.append('export %s' % var)
742
743 # Delete the folders
744 pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
745 "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
746 cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
747 cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
748
749 return self.write_script(cmd_lines, 'delete_package_dirs')
750
751 def gen_deploy_target_script(self, args):
752 """Generate a script which does what devtool deploy-target does
753
753 This script is much quicker than devtool deploy-target because it
755 does not need to start a bitbake server. All information from tinfoil
756 is hard-coded in the generated script.
757 """
758 cmd_lines = ['#!%s' % str(sys.executable)]
759 cmd_lines.append('import sys')
760 cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
761 cmd_lines.append('devtool_sys_path.reverse()')
762 cmd_lines.append('for p in devtool_sys_path:')
763 cmd_lines.append(' if p not in sys.path:')
764 cmd_lines.append(' sys.path.insert(0, p)')
765 cmd_lines.append('from devtool.deploy import deploy_no_d')
766 args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
767 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
768 filtered_args_dict = {key: value for key, value in vars(
769 args).items() if key in args_filter}
770 cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
771 cmd_lines.append('class Dict2Class(object):')
772 cmd_lines.append(' def __init__(self, my_dict):')
773 cmd_lines.append(' for key in my_dict:')
774 cmd_lines.append(' setattr(self, key, my_dict[key])')
775 cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
776 cmd_lines.append(
777 'setattr(filtered_args, "recipename", "%s")' % self.bpn)
778 cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
779 (self.d, self.workdir, self.path, self.strip_cmd,
780 self.libdir, self.base_libdir, self.max_process,
781 self.fakerootcmd, self.fakerootenv))
782 return self.write_script(cmd_lines, 'deploy_target')
783
784 def gen_install_deploy_script(self, args):
785 """Generate a script which does install and deploy"""
786 cmd_lines = ['#!/bin/bash']
787
788 cmd_lines.append(self.gen_delete_package_dirs())
789
790 # . oe-init-build-env $BUILDDIR
791 # Note: Sourcing scripts with arguments requires bash
792 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
793 self.oe_init_dir, self.oe_init_dir))
794 cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
795 self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
796
797 # bitbake -c install
798 cmd_lines.append(
799 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
800
801 # Self contained devtool deploy-target
802 cmd_lines.append(self.gen_deploy_target_script(args))
803
804 return self.write_script(cmd_lines, 'install_and_deploy')
805
806 def write_script(self, cmd_lines, script_name):
807 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
808 script_name_arch = script_name + '_' + self.recipe_id
809 script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
810 with open(script_file, 'w') as script_f:
811 script_f.write(os.linesep.join(cmd_lines))
812 st = os.stat(script_file)
813 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
814 return script_file
815
816 @property
817 def oe_init_build_env(self):
818 """Find the oe-init-build-env used for this setup"""
819 oe_init_dir = self.oe_init_dir
820 if oe_init_dir:
821 return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
822 return None
823
824 @property
825 def oe_init_dir(self):
826 """Find the directory where the oe-init-build-env is located
827
828 Assumption: There might be a layer with higher priority than poky
829 which provides oe-init-build-env in the layer's toplevel folder.
830 """
831 if not self.__oe_init_dir:
832 for layer in reversed(self.bblayers):
833 result = subprocess.run(
834 ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
835 if result.returncode == 0:
836 oe_init_dir = result.stdout.decode('utf-8').strip()
837 oe_init_path = os.path.join(
838 oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
839 if os.path.exists(oe_init_path):
840 logger.debug("Using %s from: %s" % (
841 RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
842 self.__oe_init_dir = oe_init_dir
843 break
844 if not self.__oe_init_dir:
845 logger.error("Cannot find the bitbake top level folder")
846 return self.__oe_init_dir
847
848
849def ide_setup(args, config, basepath, workspace):
850 """Generate the IDE configuration for the workspace"""
851
852 # Explicitly passing some special recipes does not make sense
853 for recipe in args.recipenames:
854 if recipe in ['meta-ide-support', 'build-sysroots']:
855 raise DevtoolError("Invalid recipe: %s." % recipe)
856
857 # Collect information about tasks which need to be bitbaked
858 bootstrap_tasks = []
859 bootstrap_tasks_late = []
860 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
861 try:
862 # define mode depending on recipes which need to be processed
863 recipes_image_names = []
864 recipes_modified_names = []
865 recipes_other_names = []
866 for recipe in args.recipenames:
867 try:
868 check_workspace_recipe(
869 workspace, recipe, bbclassextend=True)
870 recipes_modified_names.append(recipe)
871 except DevtoolError:
872 recipe_d = parse_recipe(
873 config, tinfoil, recipe, appends=True, filter_workspace=False)
874 if not recipe_d:
875 raise DevtoolError("Parsing recipe %s failed" % recipe)
876 if bb.data.inherits_class('image', recipe_d):
877 recipes_image_names.append(recipe)
878 else:
879 recipes_other_names.append(recipe)
880
881 invalid_params = False
882 if args.mode == DevtoolIdeMode.shared:
883 if len(recipes_modified_names):
884 logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
885 recipes_modified_names))
886 invalid_params = True
887 if args.mode == DevtoolIdeMode.modified:
888 if len(recipes_other_names):
889 logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
890 recipes_other_names))
891 invalid_params = True
892 if len(recipes_image_names) != 1:
893 logger.error(
894 "One image recipe is required as the rootfs for the remote development.")
895 invalid_params = True
896 for modified_recipe_name in recipes_modified_names:
897 if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
898 logger.error(
899 "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name)
900 invalid_params = True
901
902 if invalid_params:
903 raise DevtoolError("Invalid parameters are passed.")
904
905 # For the shared sysroots mode, add all dependencies of all the images to the sysroots
906 # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
907 recipes_images = []
908 for recipes_image_name in recipes_image_names:
909 logger.info("Using image: %s" % recipes_image_name)
910 recipe_image = RecipeImage(recipes_image_name)
911 recipe_image.initialize(config, tinfoil)
912 bootstrap_tasks += recipe_image.bootstrap_tasks
913 recipes_images.append(recipe_image)
914
915 # Provide a Direct SDK with shared sysroots
916 recipes_not_modified = []
917 if args.mode == DevtoolIdeMode.shared:
918 ide_support = RecipeMetaIdeSupport()
919 ide_support.initialize(config, tinfoil)
920 bootstrap_tasks += ide_support.bootstrap_tasks
921
922 logger.info("Adding %s to the Direct SDK sysroots." %
923 str(recipes_other_names))
924 for recipe_name in recipes_other_names:
925 recipe_not_modified = RecipeNotModified(recipe_name)
926 bootstrap_tasks += recipe_not_modified.bootstrap_tasks
927 recipes_not_modified.append(recipe_not_modified)
928
929 build_sysroots = RecipeBuildSysroots()
930 build_sysroots.initialize(config, tinfoil)
931 bootstrap_tasks_late += build_sysroots.bootstrap_tasks
932 shared_env = SharedSysrootsEnv()
933 shared_env.initialize(ide_support, build_sysroots)
934
935 recipes_modified = []
936 if args.mode == DevtoolIdeMode.modified:
937 logger.info("Setting up workspaces for modified recipe: %s" %
938 str(recipes_modified_names))
939 gdbs_cross = {}
940 for recipe_name in recipes_modified_names:
941 recipe_modified = RecipeModified(recipe_name)
942 recipe_modified.initialize(config, workspace, tinfoil)
943 bootstrap_tasks += recipe_modified.bootstrap_tasks
944 recipes_modified.append(recipe_modified)
945
946 if recipe_modified.target_arch not in gdbs_cross:
947 target_device = TargetDevice(args)
948 gdb_cross = RecipeGdbCross(
949 args, recipe_modified.target_arch, target_device)
950 gdb_cross.initialize(config, workspace, tinfoil)
951 bootstrap_tasks += gdb_cross.bootstrap_tasks
952 gdbs_cross[recipe_modified.target_arch] = gdb_cross
953 recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]
954
955 finally:
956 tinfoil.shutdown()
957
958 if not args.skip_bitbake:
959 bb_cmd = 'bitbake '
960 if args.bitbake_k:
961 bb_cmd += "-k "
962 bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
963 exec_build_env_command(
964 config.init_path, basepath, bb_cmd_early, watch=True)
965 if bootstrap_tasks_late:
966 bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
967 exec_build_env_command(
968 config.init_path, basepath, bb_cmd_late, watch=True)
969
970 for recipe_image in recipes_images:
971 if recipe_image.gdbserver_missing:
972 logger.warning(
973 "gdbserver is not installed in image %s. Remote debugging will not be available." % recipe_image.name)
974
975 if recipe_image.combine_dbg_image is False:
976 logger.warning(
977 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)
978
979 # Instantiate the active IDE plugin
980 ide = ide_plugins[args.ide]()
981 if args.mode == DevtoolIdeMode.shared:
982 ide.setup_shared_sysroots(shared_env)
983 elif args.mode == DevtoolIdeMode.modified:
984 for recipe_modified in recipes_modified:
985 if recipe_modified.build_tool is BuildTool.CMAKE:
986 recipe_modified.cmake_preset()
987 if recipe_modified.build_tool is BuildTool.MESON:
988 recipe_modified.gen_meson_wrapper()
989 ide.setup_modified_recipe(
990 args, recipe_image, recipe_modified)
991 else:
992 raise DevtoolError("Must not end up here.")
993
994
995def register_commands(subparsers, context):
996 """Register devtool subcommands from this plugin"""
997
998 global ide_plugins
999
1000 # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
1001 pluginpaths = [os.path.join(path, 'ide_plugins')
1002 for path in context.pluginpaths]
1003 ide_plugin_modules = []
1004 for pluginpath in pluginpaths:
1005 scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)
1006
1007 for ide_plugin_module in ide_plugin_modules:
1008 if hasattr(ide_plugin_module, 'register_ide_plugin'):
1009 ide_plugin_module.register_ide_plugin(ide_plugins)
1010 # Sort plugins according to their priority. The first entry is the default IDE plugin.
1011 ide_plugins = dict(sorted(ide_plugins.items(),
1012 key=lambda p: p[1].ide_plugin_priority(), reverse=True))
1013
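Third-party layers can hook into this discovery; a minimal sketch of an extra plugin module (all names illustrative) dropped into an ide_plugins folder on devtool's plugin path:

    # <layer>/scripts/lib/devtool/ide_plugins/ide_custom.py (illustrative)
    from devtool.ide_plugins import IdeBase

    class IdeCustom(IdeBase):
        @classmethod
        def ide_plugin_priority(cls):
            return 10  # below IdeVSCode's 100, so never the default

        def setup_shared_sysroots(self, shared_env):
            pass  # expose the cross tool-chain to the IDE here

        def setup_modified_recipe(self, args, image_recipe, modified_recipe):
            pass  # generate the per-recipe IDE configuration here

    def register_ide_plugin(ide_plugins):
        ide_plugins['custom'] = IdeCustom
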
1014 parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
1015 help='Setup the SDK and configure the IDE')
1016 parser_ide_sdk.add_argument(
1017 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
1018 'Depending on the --mode parameter, different types of SDKs and IDE configurations are generated.')
1019 parser_ide_sdk.add_argument(
1020 '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
1021 help='Different SDK types are supported:\n'
1022 '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
1023 ' devtool modify creates a workspace to work on the source code of a recipe.\n'
1024 ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directory(ies)\n'
1025 ' Usage example:\n'
1026 ' devtool modify cmake-example\n'
1027 ' devtool ide-sdk cmake-example core-image-minimal\n'
1028 ' Start the IDE in the workspace folder\n'
1029 ' At least one devtool modified recipe plus one image recipe are required:\n'
1030 ' The image recipe is used to generate the target image and the remote debug configuration.\n'
1031 '- "' + DevtoolIdeMode.shared.name + '":\n'
1032 ' Usage example:\n'
1033 ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
1034 ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
1035 ' To use this tool-chain, the environment-* file found in the DEPLOY_DIR_IMAGE folder needs to be sourced into a shell.\n'
1036 ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
1037 default_ide = list(ide_plugins.keys())[0]
1038 parser_ide_sdk.add_argument(
1039 '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
1040 help='Setup the configuration for this IDE (default: %s)' % default_ide)
1041 parser_ide_sdk.add_argument(
1042 '-t', '--target', default='root@192.168.7.2',
1043 help='Live target machine running an ssh server: user@hostname.')
1044 parser_ide_sdk.add_argument(
1045 '-G', '--gdbserver-port-start', default="1234", help='Port where gdbserver is listening.')
1046 parser_ide_sdk.add_argument(
1047 '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
1048 parser_ide_sdk.add_argument(
1049 '-e', '--ssh-exec', help='Executable to use in place of ssh')
1050 parser_ide_sdk.add_argument(
1051 '-P', '--port', help='Specify ssh port to use for connection to the target')
1052 parser_ide_sdk.add_argument(
1053 '-I', '--key', help='Specify ssh private key for connection to the target')
1054 parser_ide_sdk.add_argument(
1055 '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK.', action='store_true')
1056 parser_ide_sdk.add_argument(
1057 '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
1058 parser_ide_sdk.add_argument(
1059 '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
1060 parser_ide_sdk.add_argument(
1061 '-n', '--dry-run', help='List files to be undeployed only', action='store_true')
1062 parser_ide_sdk.add_argument(
1063 '-s', '--show-status', help='Show progress/status output', action='store_true')
1064 parser_ide_sdk.add_argument(
1065 '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
1066 parser_ide_sdk.add_argument(
1067 '--no-check-space', help='Do not check for available space before deploying', action='store_true')
1068 parser_ide_sdk.add_argument(
1069 '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
1070 parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
index 95384c5333..18daef30c3 100644
--- a/scripts/lib/devtool/menuconfig.py
+++ b/scripts/lib/devtool/menuconfig.py
@@ -3,6 +3,8 @@
 # Copyright (C) 2018 Xilinx
 # Written by: Chandana Kalluri <ckalluri@xilinx.com>
 #
+# SPDX-License-Identifier: MIT
+#
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
 # published by the Free Software Foundation.
@@ -43,7 +45,7 @@ def menuconfig(args, config, basepath, workspace):
         return 1
 
     check_workspace_recipe(workspace, args.component)
-    pn = rd.getVar('PN', True)
+    pn = rd.getVar('PN')
 
     if not rd.getVarFlag('do_menuconfig','task'):
         raise DevtoolError("This recipe does not support menuconfig option")
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
index 3aa42a1466..9aefd7e354 100644
--- a/scripts/lib/devtool/sdk.py
+++ b/scripts/lib/devtool/sdk.py
@@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace):
207 if not sstate_mirrors: 207 if not sstate_mirrors:
208 with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: 208 with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
209 f.write('SCONF_VERSION = "%s"\n' % site_conf_version) 209 f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
210 f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver) 210 f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
211 finally: 211 finally:
212 shutil.rmtree(tmpsdk_dir) 212 shutil.rmtree(tmpsdk_dir)
213 213
@@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace):
300 return 2 300 return 2
301 301
302 try: 302 try:
303 exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True) 303 exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
304 exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
304 except bb.process.ExecutionError as e: 305 except bb.process.ExecutionError as e:
305 raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) 306 raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
306 307
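
Note: two distinct fixes land here. The site.conf fragment moves to the ':append' override syntax (and drops the stray literal '\n' from the mirror entry), and the single 'bitbake build-sysroots' call is split into explicit native and target sysroot tasks. A sketch of the resulting call sequence, assuming the surrounding devtool helpers and variables:

    # Build the two sysroots separately; each -c selects a specific task.
    for task in ('build_native_sysroot', 'build_target_sysroot'):
        exec_build_env_command(config.init_path, basepath,
                               'bitbake build-sysroots -c %s' % task,
                               watch=True)
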
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py
index d24040df37..70b81cac5e 100644
--- a/scripts/lib/devtool/search.py
+++ b/scripts/lib/devtool/search.py
@@ -62,10 +62,11 @@ def search(args, config, basepath, workspace):
62 with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f: 62 with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
63 for line in f: 63 for line in f:
64 if ': ' in line: 64 if ': ' in line:
65 splitline = line.split(':', 1) 65 splitline = line.split(': ', 1)
66 key = splitline[0] 66 key = splitline[0]
67 value = splitline[1].strip() 67 value = splitline[1].strip()
68 if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'): 68 key = key.replace(":" + pkg, "")
69 if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
69 if keyword_rc.search(value): 70 if keyword_rc.search(value):
70 match = True 71 match = True
71 break 72 break
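
Note: the parsing change is subtle. Splitting on ': ' (rather than ':') keeps package-suffixed keys such as 'DESCRIPTION:mypkg' intact, and the ':<pkg>' suffix is then stripped so a single lookup list covers all packages. A runnable sketch of the new matching logic (file handling elided, names hypothetical):

    import re

    def pkgdata_matches(pkg, lines, keyword_rc):
        # Normalize override-style keys ('DESCRIPTION:mypkg' -> 'DESCRIPTION')
        # before checking them against the searchable fields.
        for line in lines:
            if ': ' not in line:
                continue
            key, value = line.split(': ', 1)
            key = key.replace(':' + pkg, '')
            if key in ('PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES'):
                if keyword_rc.search(value.strip()):
                    return True
        return False

    print(pkgdata_matches('mypkg',
                          ['DESCRIPTION:mypkg: a tiny demo package'],
                          re.compile('demo')))   # True
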
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 7b62b7e7b8..bd009f44b1 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -147,6 +147,8 @@ def add(args, config, basepath, workspace):
147 extracmdopts += ' -a' 147 extracmdopts += ' -a'
148 if args.npm_dev: 148 if args.npm_dev:
149 extracmdopts += ' --npm-dev' 149 extracmdopts += ' --npm-dev'
150 if args.no_pypi:
151 extracmdopts += ' --no-pypi'
150 if args.mirrors: 152 if args.mirrors:
151 extracmdopts += ' --mirrors' 153 extracmdopts += ' --mirrors'
152 if args.srcrev: 154 if args.srcrev:
@@ -234,10 +236,14 @@ def add(args, config, basepath, workspace):
234 if args.fetchuri and not args.no_git: 236 if args.fetchuri and not args.no_git:
235 setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) 237 setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
236 238
237 initial_rev = None 239 initial_rev = {}
238 if os.path.exists(os.path.join(srctree, '.git')): 240 if os.path.exists(os.path.join(srctree, '.git')):
239 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) 241 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
240 initial_rev = stdout.rstrip() 242 initial_rev["."] = stdout.rstrip()
243 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
244 for line in stdout.splitlines():
245 (rev, submodule) = line.split()
246 initial_rev[os.path.relpath(submodule, srctree)] = rev
241 247
242 if args.src_subdir: 248 if args.src_subdir:
243 srctree = os.path.join(srctree, args.src_subdir) 249 srctree = os.path.join(srctree, args.src_subdir)
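
Note: initial_rev becomes a dict keyed by path ('.' for the top-level tree) so submodule state can be recorded alongside the main checkout. A standalone sketch of the same git plumbing, using subprocess in place of bb.process.run:

    import os
    import subprocess

    def collect_initial_revs(srctree):
        # '.' maps to the top-level HEAD; submodules are keyed by relative path.
        revs = {'.': subprocess.check_output(
            ['git', 'rev-parse', 'HEAD'], cwd=srctree, text=True).strip()}
        out = subprocess.check_output(
            ['git', 'submodule', '--quiet', 'foreach', '--recursive',
             'echo `git rev-parse HEAD` $PWD'], cwd=srctree, text=True)
        for line in out.splitlines():
            rev, path = line.split()
            revs[os.path.relpath(path, srctree)] = rev
        return revs
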
@@ -251,16 +257,17 @@ def add(args, config, basepath, workspace):
251 if b_is_s: 257 if b_is_s:
252 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) 258 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
253 if initial_rev: 259 if initial_rev:
254 f.write('\n# initial_rev: %s\n' % initial_rev) 260 for key, value in initial_rev.items():
261 f.write('\n# initial_rev %s: %s\n' % (key, value))
255 262
256 if args.binary: 263 if args.binary:
257 f.write('do_install_append() {\n') 264 f.write('do_install:append() {\n')
258 f.write(' rm -rf ${D}/.git\n') 265 f.write(' rm -rf ${D}/.git\n')
259 f.write(' rm -f ${D}/singletask.lock\n') 266 f.write(' rm -f ${D}/singletask.lock\n')
260 f.write('}\n') 267 f.write('}\n')
261 268
262 if bb.data.inherits_class('npm', rd): 269 if bb.data.inherits_class('npm', rd):
263 f.write('python do_configure_append() {\n') 270 f.write('python do_configure:append() {\n')
264 f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') 271 f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
265 f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') 272 f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
266 f.write(' bb.utils.remove(lockfile)\n') 273 f.write(' bb.utils.remove(lockfile)\n')
@@ -318,10 +325,6 @@ def _check_compatible_recipe(pn, d):
318 raise DevtoolError("The %s recipe is a packagegroup, and therefore is " 325 raise DevtoolError("The %s recipe is a packagegroup, and therefore is "
319 "not supported by this tool" % pn, 4) 326 "not supported by this tool" % pn, 4)
320 327
321 if bb.data.inherits_class('meta', d):
322 raise DevtoolError("The %s recipe is a meta-recipe, and therefore is "
323 "not supported by this tool" % pn, 4)
324
325 if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): 328 if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
326 # Not an incompatibility error per se, so we don't pass the error code 329 # Not an incompatibility error per se, so we don't pass the error code
327 raise DevtoolError("externalsrc is currently enabled for the %s " 330 raise DevtoolError("externalsrc is currently enabled for the %s "
@@ -357,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
357 bb.utils.mkdirhier(dst_d) 360 bb.utils.mkdirhier(dst_d)
358 shutil.move(src, dst) 361 shutil.move(src, dst)
359 362
360def _copy_file(src, dst, dry_run_outdir=None): 363def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
361 """Copy a file. Creates all the directory components of destination path.""" 364 """Copy a file. Creates all the directory components of destination path."""
362 dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' 365 dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
363 logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) 366 logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
@@ -457,7 +460,7 @@ def sync(args, config, basepath, workspace):
457 finally: 460 finally:
458 tinfoil.shutdown() 461 tinfoil.shutdown()
459 462
460def symlink_oelocal_files_srctree(rd,srctree): 463def symlink_oelocal_files_srctree(rd, srctree):
461 import oe.patch 464 import oe.patch
462 if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): 465 if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
463 # If recipe extracts to ${WORKDIR}, symlink the files into the srctree 466 # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
@@ -481,11 +484,7 @@ def symlink_oelocal_files_srctree(rd,srctree):
481 os.symlink('oe-local-files/%s' % fn, destpth) 484 os.symlink('oe-local-files/%s' % fn, destpth)
482 addfiles.append(os.path.join(relpth, fn)) 485 addfiles.append(os.path.join(relpth, fn))
483 if addfiles: 486 if addfiles:
484 bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) 487 oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd)
485 useroptions = []
486 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
487 bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
488
489 488
490def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): 489def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
491 """Extract sources of a recipe""" 490 """Extract sources of a recipe"""
@@ -523,8 +522,10 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
523 history = d.varhistory.variable('SRC_URI') 522 history = d.varhistory.variable('SRC_URI')
524 for event in history: 523 for event in history:
525 if not 'flag' in event: 524 if not 'flag' in event:
526 if event['op'].startswith(('_append[', '_prepend[')): 525 if event['op'].startswith((':append[', ':prepend[')):
527 extra_overrides.append(event['op'].split('[')[1].split(']')[0]) 526 override = event['op'].split('[')[1].split(']')[0]
527 if not override.startswith('pn-'):
528 extra_overrides.append(override)
528 # We want to remove duplicate overrides. If a recipe had multiple 529 # We want to remove duplicate overrides. If a recipe had multiple
529 # SRC_URI_override += values it would cause mulitple instances of 530 # SRC_URI_override += values it would cause mulitple instances of
530 # overrides. This doesn't play nicely with things like creating a 531 # overrides. This doesn't play nicely with things like creating a
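
Note: besides switching to the new ':append[...]'/':prepend[...]' operation names, this hunk filters out 'pn-' overrides, which are recipe-scoped rather than configuration overrides. A self-contained sketch with synthetic history events:

    def extract_overrides(history):
        # Variable-history ops look like ':append[qemuarm]'; keep the bracketed
        # override name unless it is a recipe-scoped 'pn-' override.
        overrides = []
        for event in history:
            if 'flag' in event:
                continue
            if event['op'].startswith((':append[', ':prepend[')):
                override = event['op'].split('[')[1].split(']')[0]
                if not override.startswith('pn-'):
                    overrides.append(override)
        return list(dict.fromkeys(overrides))   # de-duplicate, keep order

    print(extract_overrides([{'op': ':append[qemuarm]'},
                             {'op': ':append[pn-busybox]'}]))  # ['qemuarm']
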
@@ -569,6 +570,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
569 logger.debug('writing append file %s' % appendfile) 570 logger.debug('writing append file %s' % appendfile)
570 with open(appendfile, 'a') as f: 571 with open(appendfile, 'a') as f:
571 f.write('###--- _extract_source\n') 572 f.write('###--- _extract_source\n')
573 f.write('deltask do_recipe_qa\n')
574 f.write('deltask do_recipe_qa_setscene\n')
575 f.write('ERROR_QA:remove = "patch-fuzz"\n')
572 f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) 576 f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
573 f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) 577 f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
574 if not is_kernel_yocto: 578 if not is_kernel_yocto:
@@ -586,6 +590,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
586 preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') 590 preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
587 with open(preservestampfile, 'w') as f: 591 with open(preservestampfile, 'w') as f:
588 f.write(d.getVar('STAMP')) 592 f.write(d.getVar('STAMP'))
593 tinfoil.modified_files()
589 try: 594 try:
590 if is_kernel_yocto: 595 if is_kernel_yocto:
591 # We need to generate the kernel config 596 # We need to generate the kernel config
@@ -648,23 +653,34 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
648 653
649 if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): 654 if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
650 shutil.rmtree(workshareddir) 655 shutil.rmtree(workshareddir)
651 oe.path.copyhardlinktree(srcsubdir,workshareddir) 656 oe.path.copyhardlinktree(srcsubdir, workshareddir)
652 elif not os.path.exists(workshareddir): 657 elif not os.path.exists(workshareddir):
653 oe.path.copyhardlinktree(srcsubdir,workshareddir) 658 oe.path.copyhardlinktree(srcsubdir, workshareddir)
654 659
655 tempdir_localdir = os.path.join(tempdir, 'oe-local-files') 660 tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
656 srctree_localdir = os.path.join(srctree, 'oe-local-files') 661 srctree_localdir = os.path.join(srctree, 'oe-local-files')
657 662
658 if sync: 663 if sync:
659 bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) 664 try:
660 665 logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
661 # Move oe-local-files directory to srctree 666 bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
662 # As the oe-local-files is not part of the constructed git tree, 667
663 # remove them directly during the synchrounizating might surprise 668 # Use git fetch to update the source with the current recipe
664 # the users. Instead, we move it to oe-local-files.bak and remind 669 # To be able to update the currently checked out branch with
665 # user in the log message. 670 # possibly new history (no fast-forward) git needs to be told
671 # that's ok
672 logger.info('Syncing source files including patches to git branch: %s' % devbranch)
673 bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
674 except bb.process.ExecutionError as e:
675 raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
676
677 # Move the oe-local-files directory to srctree.
678 # As oe-local-files is not part of the constructed git tree,
679 # removing it directly during the synchronization might surprise
680 # the user. Instead, we move it to oe-local-files.bak and remind
681 # the user in the log message.
666 if os.path.exists(srctree_localdir + '.bak'): 682 if os.path.exists(srctree_localdir + '.bak'):
667 shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') 683 shutil.rmtree(srctree_localdir + '.bak')
668 684
669 if os.path.exists(srctree_localdir): 685 if os.path.exists(srctree_localdir):
670 logger.info('Backing up current local file directory %s' % srctree_localdir) 686 logger.info('Backing up current local file directory %s' % srctree_localdir)
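
Note: the rewritten sync path first snapshots the existing devtool branch, then force-fetches the regenerated branch over the checked-out one; '--update-head-ok' is what permits rewriting the branch that HEAD points at. A sketch with subprocess standing in for bb.process.run (paths are illustrative):

    import subprocess

    def sync_devtool_branch(srctree, srcsubdir, devbranch):
        # Keep a recovery point before rewriting the user's branch.
        subprocess.run(['git', 'branch', '-f', devbranch + '.bak'],
                       cwd=srctree, check=True)
        # A non-fast-forward update of the checked-out branch needs both flags.
        subprocess.run(['git', 'fetch', '--update-head-ok', '--force',
                        'file://' + srcsubdir,
                        '%s:%s' % (devbranch, devbranch)],
                       cwd=srctree, check=True)
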
@@ -680,7 +696,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
680 shutil.move(tempdir_localdir, srcsubdir) 696 shutil.move(tempdir_localdir, srcsubdir)
681 697
682 shutil.move(srcsubdir, srctree) 698 shutil.move(srcsubdir, srctree)
683 symlink_oelocal_files_srctree(d,srctree) 699 symlink_oelocal_files_srctree(d, srctree)
684 700
685 if is_kernel_yocto: 701 if is_kernel_yocto:
686 logger.info('Copying kernel config to srctree') 702 logger.info('Copying kernel config to srctree')
@@ -746,14 +762,14 @@ def _check_preserve(config, recipename):
746 os.remove(removefile) 762 os.remove(removefile)
747 else: 763 else:
748 tf.write(line) 764 tf.write(line)
749 os.rename(newfile, origfile) 765 bb.utils.rename(newfile, origfile)
750 766
751def get_staging_kver(srcdir): 767def get_staging_kver(srcdir):
752 # Kernel version from work-shared 768 # Kernel version from work-shared
753 kerver = [] 769 kerver = []
754 staging_kerVer="" 770 staging_kerVer=""
755 if os.path.exists(srcdir) and os.listdir(srcdir): 771 if os.path.exists(srcdir) and os.listdir(srcdir):
756 with open(os.path.join(srcdir,"Makefile")) as f: 772 with open(os.path.join(srcdir, "Makefile")) as f:
757 version = [next(f) for x in range(5)][1:4] 773 version = [next(f) for x in range(5)][1:4]
758 for word in version: 774 for word in version:
759 kerver.append(word.split('= ')[1].split('\n')[0]) 775 kerver.append(word.split('= ')[1].split('\n')[0])
@@ -763,10 +779,20 @@ def get_staging_kver(srcdir):
763def get_staging_kbranch(srcdir): 779def get_staging_kbranch(srcdir):
764 staging_kbranch = "" 780 staging_kbranch = ""
765 if os.path.exists(srcdir) and os.listdir(srcdir): 781 if os.path.exists(srcdir) and os.listdir(srcdir):
766 (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) 782 (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
767 staging_kbranch = "".join(branch.split('\n')[0]) 783 staging_kbranch = "".join(branch.split('\n')[0])
768 return staging_kbranch 784 return staging_kbranch
769 785
786def get_real_srctree(srctree, s, workdir):
787 # Check that recipe isn't using a shared workdir
788 s = os.path.abspath(s)
789 workdir = os.path.abspath(workdir)
790 if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
791 # Handle if S is set to a subdirectory of the source
792 srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
793 srctree = os.path.join(srctree, srcsubdir)
794 return srctree
795
770def modify(args, config, basepath, workspace): 796def modify(args, config, basepath, workspace):
771 """Entry point for the devtool 'modify' subcommand""" 797 """Entry point for the devtool 'modify' subcommand"""
772 import bb 798 import bb
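
Note: the new get_real_srctree() helper factors out logic that previously lived inline in modify() (see the removal further down). A runnable illustration with made-up paths:

    import os

    def get_real_srctree(srctree, s, workdir):
        # Mirrors the helper above: re-apply any extra path component when S
        # is a subdirectory of the unpacked source (S != WORKDIR/<topdir>).
        s = os.path.abspath(s)
        workdir = os.path.abspath(workdir)
        if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
            srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
            srctree = os.path.join(srctree, srcsubdir)
        return srctree

    # Illustrative paths only:
    print(get_real_srctree('/home/user/sources/pkg',
                           '/build/tmp/work/pkg/1.0/pkg-1.0/src',
                           '/build/tmp/work/pkg/1.0'))
    # -> /home/user/sources/pkg/src
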
@@ -811,8 +837,8 @@ def modify(args, config, basepath, workspace):
811 837
812 _check_compatible_recipe(pn, rd) 838 _check_compatible_recipe(pn, rd)
813 839
814 initial_rev = None 840 initial_revs = {}
815 commits = [] 841 commits = {}
816 check_commits = False 842 check_commits = False
817 843
818 if bb.data.inherits_class('kernel-yocto', rd): 844 if bb.data.inherits_class('kernel-yocto', rd):
@@ -824,10 +850,10 @@ def modify(args, config, basepath, workspace):
824 staging_kerVer = get_staging_kver(srcdir) 850 staging_kerVer = get_staging_kver(srcdir)
825 staging_kbranch = get_staging_kbranch(srcdir) 851 staging_kbranch = get_staging_kbranch(srcdir)
826 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): 852 if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
827 oe.path.copyhardlinktree(srcdir,srctree) 853 oe.path.copyhardlinktree(srcdir, srctree)
828 workdir = rd.getVar('WORKDIR') 854 workdir = rd.getVar('WORKDIR')
829 srcsubdir = rd.getVar('S') 855 srcsubdir = rd.getVar('S')
830 localfilesdir = os.path.join(srctree,'oe-local-files') 856 localfilesdir = os.path.join(srctree, 'oe-local-files')
831 # Move local source files into separate subdir 857 # Move local source files into separate subdir
832 recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] 858 recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
833 local_files = oe.recipeutils.get_recipe_local_files(rd) 859 local_files = oe.recipeutils.get_recipe_local_files(rd)
@@ -851,9 +877,9 @@ def modify(args, config, basepath, workspace):
851 for fname in local_files: 877 for fname in local_files:
852 _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) 878 _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
853 with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: 879 with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
854 f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') 880 f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n')
855 881
856 symlink_oelocal_files_srctree(rd,srctree) 882 symlink_oelocal_files_srctree(rd, srctree)
857 883
858 task = 'do_configure' 884 task = 'do_configure'
859 res = tinfoil.build_targets(pn, task, handle_events=True) 885 res = tinfoil.build_targets(pn, task, handle_events=True)
@@ -861,22 +887,30 @@ def modify(args, config, basepath, workspace):
861 # Copy .config to workspace 887 # Copy .config to workspace
862 kconfpath = rd.getVar('B') 888 kconfpath = rd.getVar('B')
863 logger.info('Copying kernel config to workspace') 889 logger.info('Copying kernel config to workspace')
864 shutil.copy2(os.path.join(kconfpath, '.config'),srctree) 890 shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
865 891
866 # Set this to true, we still need to get initial_rev 892 # Set this to true, we still need to get initial_rev
867 # by parsing the git repo 893 # by parsing the git repo
868 args.no_extract = True 894 args.no_extract = True
869 895
870 if not args.no_extract: 896 if not args.no_extract:
871 initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) 897 initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
872 if not initial_rev: 898 if not initial_revs["."]:
873 return 1 899 return 1
874 logger.info('Source tree extracted to %s' % srctree) 900 logger.info('Source tree extracted to %s' % srctree)
901
875 if os.path.exists(os.path.join(srctree, '.git')): 902 if os.path.exists(os.path.join(srctree, '.git')):
876 # Get list of commits since this revision 903 # Get list of commits since this revision
877 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) 904 (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
878 commits = stdout.split() 905 commits["."] = stdout.split()
879 check_commits = True 906 check_commits = True
907 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
908 for line in stdout.splitlines():
909 (rev, submodule_path) = line.split()
910 submodule = os.path.relpath(submodule_path, srctree)
911 initial_revs[submodule] = rev
912 (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
913 commits[submodule] = stdout.split()
880 else: 914 else:
881 if os.path.exists(os.path.join(srctree, '.git')): 915 if os.path.exists(os.path.join(srctree, '.git')):
882 # Check if it's a tree previously extracted by us. This is done 916 # Check if it's a tree previously extracted by us. This is done
@@ -893,11 +927,11 @@ def modify(args, config, basepath, workspace):
893 for line in stdout.splitlines(): 927 for line in stdout.splitlines():
894 if line.startswith('*'): 928 if line.startswith('*'):
895 (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) 929 (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
896 initial_rev = stdout.rstrip() 930 initial_revs["."] = stdout.rstrip()
897 if not initial_rev: 931 if "." not in initial_revs:
898 # Otherwise, just grab the head revision 932 # Otherwise, just grab the head revision
899 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) 933 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
900 initial_rev = stdout.rstrip() 934 initial_revs["."] = stdout.rstrip()
901 935
902 branch_patches = {} 936 branch_patches = {}
903 if check_commits: 937 if check_commits:
@@ -914,62 +948,81 @@ def modify(args, config, basepath, workspace):
914 seen_patches = [] 948 seen_patches = []
915 for branch in branches: 949 for branch in branches:
916 branch_patches[branch] = [] 950 branch_patches[branch] = []
917 (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) 951 (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
918 for line in stdout.splitlines(): 952 for sha1 in stdout.splitlines():
919 line = line.strip() 953 notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
920 if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): 954 origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
921 origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() 955 if origpatch and origpatch not in seen_patches:
922 if not origpatch in seen_patches: 956 seen_patches.append(origpatch)
923 seen_patches.append(origpatch) 957 branch_patches[branch].append(origpatch)
924 branch_patches[branch].append(origpatch)
925 958
926 # Need to grab this here in case the source is within a subdirectory 959 # Need to grab this here in case the source is within a subdirectory
927 srctreebase = srctree 960 srctreebase = srctree
928 961 srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
929 # Check that recipe isn't using a shared workdir
930 s = os.path.abspath(rd.getVar('S'))
931 workdir = os.path.abspath(rd.getVar('WORKDIR'))
932 if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
933 # Handle if S is set to a subdirectory of the source
934 srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
935 srctree = os.path.join(srctree, srcsubdir)
936 962
937 bb.utils.mkdirhier(os.path.dirname(appendfile)) 963 bb.utils.mkdirhier(os.path.dirname(appendfile))
938 with open(appendfile, 'w') as f: 964 with open(appendfile, 'w') as f:
939 f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n') 965 # if not present, add type=git-dependency to the secondary sources
966 # (non local files) so they can be extracted correctly when building a recipe after
967 # doing a devtool modify on it
968 src_uri = rd.getVar('SRC_URI').split()
969 src_uri_append = []
970 src_uri_remove = []
971
972 # Assume first entry is main source extracted in ${S} so skip it
973 src_uri = src_uri[1::]
974
975 # Add "type=git-dependency" to all non local sources
976 for url in src_uri:
977 if not url.startswith('file://') and not 'type=' in url:
978 src_uri_remove.append(url)
979 src_uri_append.append('%s;type=git-dependency' % url)
980
981 if src_uri_remove:
982 f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
983 f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
984
985 f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
940 # Local files can be modified/tracked in separate subdir under srctree 986 # Local files can be modified/tracked in separate subdir under srctree
941 # Mostly useful for packages with S != WORKDIR 987 # Mostly useful for packages with S != WORKDIR
942 f.write('FILESPATH_prepend := "%s:"\n' % 988 f.write('FILESPATH:prepend := "%s:"\n' %
943 os.path.join(srctreebase, 'oe-local-files')) 989 os.path.join(srctreebase, 'oe-local-files'))
944 f.write('# srctreebase: %s\n' % srctreebase) 990 f.write('# srctreebase: %s\n' % srctreebase)
945 991
946 f.write('\ninherit externalsrc\n') 992 f.write('\ninherit externalsrc\n')
947 f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') 993 f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
948 f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) 994 f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
949 995
950 b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) 996 b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
951 if b_is_s: 997 if b_is_s:
952 f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) 998 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
953 999
954 if bb.data.inherits_class('kernel', rd): 1000 if bb.data.inherits_class('kernel', rd):
955 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' 1001 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
956 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n') 1002 'do_fetch do_unpack do_kernel_configcheck"\n')
957 f.write('\ndo_patch[noexec] = "1"\n') 1003 f.write('\ndo_patch[noexec] = "1"\n')
958 f.write('\ndo_configure_append() {\n' 1004 f.write('\ndo_configure:append() {\n'
959 ' cp ${B}/.config ${S}/.config.baseline\n' 1005 ' cp ${B}/.config ${S}/.config.baseline\n'
960 ' ln -sfT ${B}/.config ${S}/.config.new\n' 1006 ' ln -sfT ${B}/.config ${S}/.config.new\n'
961 '}\n') 1007 '}\n')
962 if rd.getVarFlag('do_menuconfig','task'): 1008 f.write('\ndo_kernel_configme:prepend() {\n'
963 f.write('\ndo_configure_append() {\n' 1009 ' if [ -e ${S}/.config ]; then\n'
964 ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' 1010 ' mv ${S}/.config ${S}/.config.old\n'
965 ' cp ${B}/.config ${S}/.config.baseline\n' 1011 ' fi\n'
966 ' ln -sfT ${B}/.config ${S}/.config.new\n' 1012 '}\n')
1013 if rd.getVarFlag('do_menuconfig', 'task'):
1014 f.write('\ndo_configure:append() {\n'
1015 ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
1016 ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
1017 ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
967 ' fi\n' 1018 ' fi\n'
968 '}\n') 1019 '}\n')
969 if initial_rev: 1020 if initial_revs:
970 f.write('\n# initial_rev: %s\n' % initial_rev) 1021 for name, rev in initial_revs.items():
971 for commit in commits: 1022 f.write('\n# initial_rev %s: %s\n' % (name, rev))
972 f.write('# commit: %s\n' % commit) 1023 if name in commits:
1024 for commit in commits[name]:
1025 f.write('# commit %s: %s\n' % (name, commit))
973 if branch_patches: 1026 if branch_patches:
974 for branch in branch_patches: 1027 for branch in branch_patches:
975 if branch == args.branch: 1028 if branch == args.branch:
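
Note on the SRC_URI rewrite in this hunk: every non-local secondary source that carries no explicit type gets ';type=git-dependency' appended, so it is still fetched correctly once EXTERNALSRC takes over the main source. A self-contained sketch with hypothetical URIs:

    def tag_secondary_sources(src_uri):
        # The first SRC_URI entry is assumed to be the main source unpacked
        # in ${S}; every other remote entry without an explicit type is tagged.
        remove, append = [], []
        for url in src_uri[1:]:
            if not url.startswith('file://') and 'type=' not in url:
                remove.append(url)
                append.append('%s;type=git-dependency' % url)
        return remove, append

    remove, append = tag_secondary_sources([
        'git://example.com/main.git;branch=main',
        'git://example.com/dep.git;branch=main',
        'file://fix.patch'])
    print(append)
    # ['git://example.com/dep.git;branch=main;type=git-dependency']
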
@@ -1089,10 +1142,10 @@ def rename(args, config, basepath, workspace):
1089 1142
1090 # Rename bbappend 1143 # Rename bbappend
1091 logger.info('Renaming %s to %s' % (append, newappend)) 1144 logger.info('Renaming %s to %s' % (append, newappend))
1092 os.rename(append, newappend) 1145 bb.utils.rename(append, newappend)
1093 # Rename recipe file 1146 # Rename recipe file
1094 logger.info('Renaming %s to %s' % (recipefile, newfile)) 1147 logger.info('Renaming %s to %s' % (recipefile, newfile))
1095 os.rename(recipefile, newfile) 1148 bb.utils.rename(recipefile, newfile)
1096 1149
1097 # Rename source tree if it's the default path 1150 # Rename source tree if it's the default path
1098 appendmd5 = None 1151 appendmd5 = None
@@ -1192,44 +1245,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
1192 branchname = stdout.rstrip() 1245 branchname = stdout.rstrip()
1193 1246
1194 # Parse initial rev from recipe if not specified 1247 # Parse initial rev from recipe if not specified
1195 commits = [] 1248 commits = {}
1196 patches = [] 1249 patches = []
1250 initial_revs = {}
1197 with open(recipe_path, 'r') as f: 1251 with open(recipe_path, 'r') as f:
1198 for line in f: 1252 for line in f:
1199 if line.startswith('# initial_rev:'): 1253 pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
1200 if not initial_rev: 1254 match = re.search(pattern, line)
1201 initial_rev = line.split(':')[-1].strip() 1255 if match:
1202 elif line.startswith('# commit:') and not force_patch_refresh: 1256 name = match.group(1)
1203 commits.append(line.split(':')[-1].strip()) 1257 rev = match.group(2)
1204 elif line.startswith('# patches_%s:' % branchname): 1258 if line.startswith('# initial_rev'):
1205 patches = line.split(':')[-1].strip().split(',') 1259 if not (name == "." and initial_rev):
1206 1260 initial_revs[name] = rev
1207 update_rev = initial_rev 1261 elif line.startswith('# commit') and not force_patch_refresh:
1208 changed_revs = None 1262 if name not in commits:
1209 if initial_rev: 1263 commits[name] = [rev]
1264 else:
1265 commits[name].append(rev)
1266 elif line.startswith('# patches_%s:' % branchname):
1267 patches = line.split(':')[-1].strip().split(',')
1268
1269 update_revs = dict(initial_revs)
1270 changed_revs = {}
1271 for name, rev in initial_revs.items():
1210 # Find first actually changed revision 1272 # Find first actually changed revision
1211 stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % 1273 stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
1212 initial_rev, cwd=srctree) 1274 rev, cwd=os.path.join(srctree, name))
1213 newcommits = stdout.split() 1275 newcommits = stdout.split()
1214 for i in range(min(len(commits), len(newcommits))): 1276 if name in commits:
1215 if newcommits[i] == commits[i]: 1277 for i in range(min(len(commits[name]), len(newcommits))):
1216 update_rev = commits[i] 1278 if newcommits[i] == commits[name][i]:
1279 update_revs[name] = commits[name][i]
1217 1280
1218 try: 1281 try:
1219 stdout, _ = bb.process.run('git cherry devtool-patched', 1282 stdout, _ = bb.process.run('git cherry devtool-patched',
1220 cwd=srctree) 1283 cwd=os.path.join(srctree, name))
1221 except bb.process.ExecutionError as err: 1284 except bb.process.ExecutionError as err:
1222 stdout = None 1285 stdout = None
1223 1286
1224 if stdout is not None and not force_patch_refresh: 1287 if stdout is not None and not force_patch_refresh:
1225 changed_revs = []
1226 for line in stdout.splitlines(): 1288 for line in stdout.splitlines():
1227 if line.startswith('+ '): 1289 if line.startswith('+ '):
1228 rev = line.split()[1] 1290 rev = line.split()[1]
1229 if rev in newcommits: 1291 if rev in newcommits:
1230 changed_revs.append(rev) 1292 if name not in changed_revs:
1293 changed_revs[name] = [rev]
1294 else:
1295 changed_revs[name].append(rev)
1231 1296
1232 return initial_rev, update_rev, changed_revs, patches 1297 return initial_revs, update_revs, changed_revs, patches
1233 1298
1234def _remove_file_entries(srcuri, filelist): 1299def _remove_file_entries(srcuri, filelist):
1235 """Remove file:// entries from SRC_URI""" 1300 """Remove file:// entries from SRC_URI"""
@@ -1284,14 +1349,17 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru
1284 raise 1349 raise
1285 1350
1286 1351
1287def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): 1352def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
1288 """Export patches from srctree to given location. 1353 """Export patches from srctree to given location.
1289 Returns three-tuple of dicts: 1354 Returns three-tuple of dicts:
1290 1. updated - patches that already exist in SRCURI 1355 1. updated - patches that already exist in SRCURI
1291 2. added - new patches that don't exist in SRCURI 1356 2. added - new patches that don't exist in SRCURI
1292 3 removed - patches that exist in SRCURI but not in exported patches 1357 3 removed - patches that exist in SRCURI but not in exported patches
1293 In each dict the key is the 'basepath' of the URI and value is the 1358 In each dict the key is the 'basepath' of the URI and value is:
1294 absolute path to the existing file in recipe space (if any). 1359 - for updated and added dicts, a dict with 2 optional keys:
1360 - 'path': the absolute path to the existing file in recipe space (if any)
1361 - 'patchdir': the directory in which the patch should be applied (if any)
1362 - for removed dict, the absolute path to the existing file in recipe space
1295 """ 1363 """
1296 import oe.recipeutils 1364 import oe.recipeutils
1297 from oe.patch import GitApplyTree 1365 from oe.patch import GitApplyTree
@@ -1305,54 +1373,60 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
1305 1373
1306 # Generate patches from Git, exclude local files directory 1374 # Generate patches from Git, exclude local files directory
1307 patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') 1375 patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
1308 GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) 1376 GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
1309 1377 for dirpath, dirnames, filenames in os.walk(destdir):
1310 new_patches = sorted(os.listdir(destdir)) 1378 new_patches = filenames
1311 for new_patch in new_patches: 1379 reldirpath = os.path.relpath(dirpath, destdir)
1312 # Strip numbering from patch names. If it's a git sequence named patch, 1380 for new_patch in new_patches:
1313 # the numbers might not match up since we are starting from a different 1381 # Strip numbering from patch names. If it's a git sequence named patch,
1314 # revision This does assume that people are using unique shortlog 1382 # the numbers might not match up since we are starting from a different
1315 # values, but they ought to be anyway... 1383 # revision This does assume that people are using unique shortlog
1316 new_basename = seqpatch_re.match(new_patch).group(2) 1384 # values, but they ought to be anyway...
1317 match_name = None 1385 new_basename = seqpatch_re.match(new_patch).group(2)
1318 for old_patch in existing_patches: 1386 match_name = None
1319 old_basename = seqpatch_re.match(old_patch).group(2) 1387 for old_patch in existing_patches:
1320 old_basename_splitext = os.path.splitext(old_basename) 1388 old_basename = seqpatch_re.match(old_patch).group(2)
1321 if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: 1389 old_basename_splitext = os.path.splitext(old_basename)
1322 old_patch_noext = os.path.splitext(old_patch)[0] 1390 if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
1323 match_name = old_patch_noext 1391 old_patch_noext = os.path.splitext(old_patch)[0]
1324 break 1392 match_name = old_patch_noext
1325 elif new_basename == old_basename: 1393 break
1326 match_name = old_patch 1394 elif new_basename == old_basename:
1327 break 1395 match_name = old_patch
1328 if match_name: 1396 break
1329 # Rename patch files 1397 if match_name:
1330 if new_patch != match_name: 1398 # Rename patch files
1331 os.rename(os.path.join(destdir, new_patch), 1399 if new_patch != match_name:
1332 os.path.join(destdir, match_name)) 1400 bb.utils.rename(os.path.join(destdir, new_patch),
1333 # Need to pop it off the list now before checking changed_revs 1401 os.path.join(destdir, match_name))
1334 oldpath = existing_patches.pop(old_patch) 1402 # Need to pop it off the list now before checking changed_revs
1335 if changed_revs is not None: 1403 oldpath = existing_patches.pop(old_patch)
1336 # Avoid updating patches that have not actually changed 1404 if changed_revs is not None and dirpath in changed_revs:
1337 with open(os.path.join(destdir, match_name), 'r') as f: 1405 # Avoid updating patches that have not actually changed
1338 firstlineitems = f.readline().split() 1406 with open(os.path.join(dirpath, match_name), 'r') as f:
1339 # Looking for "From <hash>" line 1407 firstlineitems = f.readline().split()
1340 if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: 1408 # Looking for "From <hash>" line
1341 if not firstlineitems[1] in changed_revs: 1409 if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
1342 continue 1410 if not firstlineitems[1] in changed_revs[dirpath]:
1343 # Recompress if necessary 1411 continue
1344 if oldpath.endswith(('.gz', '.Z')): 1412 # Recompress if necessary
1345 bb.process.run(['gzip', match_name], cwd=destdir) 1413 if oldpath.endswith(('.gz', '.Z')):
1346 if oldpath.endswith('.gz'): 1414 bb.process.run(['gzip', match_name], cwd=destdir)
1347 match_name += '.gz' 1415 if oldpath.endswith('.gz'):
1348 else: 1416 match_name += '.gz'
1349 match_name += '.Z' 1417 else:
1350 elif oldpath.endswith('.bz2'): 1418 match_name += '.Z'
1351 bb.process.run(['bzip2', match_name], cwd=destdir) 1419 elif oldpath.endswith('.bz2'):
1352 match_name += '.bz2' 1420 bb.process.run(['bzip2', match_name], cwd=destdir)
1353 updated[match_name] = oldpath 1421 match_name += '.bz2'
1354 else: 1422 updated[match_name] = {'path' : oldpath}
1355 added[new_patch] = None 1423 if reldirpath != ".":
1424 updated[match_name]['patchdir'] = reldirpath
1425 else:
1426 added[new_patch] = {}
1427 if reldirpath != ".":
1428 added[new_patch]['patchdir'] = reldirpath
1429
1356 return (updated, added, existing_patches) 1430 return (updated, added, existing_patches)
1357 1431
1358 1432
@@ -1389,8 +1463,10 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1389 1. updated - files that already exist in SRCURI 1463 1. updated - files that already exist in SRCURI
1390 2. added - new files files that don't exist in SRCURI 1464 2. added - new files files that don't exist in SRCURI
1391 3 removed - files that exist in SRCURI but not in exported files 1465 3 removed - files that exist in SRCURI but not in exported files
1392 In each dict the key is the 'basepath' of the URI and value is the 1466 In each dict the key is the 'basepath' of the URI and value is:
1393 absolute path to the existing file in recipe space (if any). 1467 - for updated and added dicts, a dict with 1 optional key:
1468 - 'path': the absolute path to the existing file in recipe space (if any)
1469 - for removed dict, the absolute path to the existing file in recipe space
1394 """ 1470 """
1395 import oe.recipeutils 1471 import oe.recipeutils
1396 1472
@@ -1403,6 +1479,18 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1403 updated = OrderedDict() 1479 updated = OrderedDict()
1404 added = OrderedDict() 1480 added = OrderedDict()
1405 removed = OrderedDict() 1481 removed = OrderedDict()
1482
1483 # Get current branch and return early with empty lists
1484 # if on one of the override branches
1485 # (local files are provided only for the main branch and processing
1486 # them against lists from recipe overrides will result in mismatches
1487 # and broken modifications to recipes).
1488 stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
1489 cwd=srctree)
1490 branchname = stdout.rstrip()
1491 if branchname.startswith(override_branch_prefix):
1492 return (updated, added, removed)
1493
1406 local_files_dir = os.path.join(srctreebase, 'oe-local-files') 1494 local_files_dir = os.path.join(srctreebase, 'oe-local-files')
1407 git_files = _git_ls_tree(srctree) 1495 git_files = _git_ls_tree(srctree)
1408 if 'oe-local-files' in git_files: 1496 if 'oe-local-files' in git_files:
@@ -1460,9 +1548,9 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1460 origpath = existing_files.pop(fname) 1548 origpath = existing_files.pop(fname)
1461 workpath = os.path.join(local_files_dir, fname) 1549 workpath = os.path.join(local_files_dir, fname)
1462 if not filecmp.cmp(origpath, workpath): 1550 if not filecmp.cmp(origpath, workpath):
1463 updated[fname] = origpath 1551 updated[fname] = {'path' : origpath}
1464 elif fname != '.gitignore': 1552 elif fname != '.gitignore':
1465 added[fname] = None 1553 added[fname] = {}
1466 1554
1467 workdir = rd.getVar('WORKDIR') 1555 workdir = rd.getVar('WORKDIR')
1468 s = rd.getVar('S') 1556 s = rd.getVar('S')
@@ -1479,7 +1567,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1479 if os.path.exists(fpath): 1567 if os.path.exists(fpath):
1480 origpath = existing_files.pop(fname) 1568 origpath = existing_files.pop(fname)
1481 if not filecmp.cmp(origpath, fpath): 1569 if not filecmp.cmp(origpath, fpath):
1482 updated[fpath] = origpath 1570 updated[fpath] = {'path' : origpath}
1483 1571
1484 removed = existing_files 1572 removed = existing_files
1485 return (updated, added, removed) 1573 return (updated, added, removed)
@@ -1508,6 +1596,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1508 recipedir = os.path.basename(recipefile) 1596 recipedir = os.path.basename(recipefile)
1509 logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) 1597 logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
1510 1598
1599 # Get original SRCREV
1600 old_srcrev = rd.getVar('SRCREV') or ''
1601 if old_srcrev == "INVALID":
1602 raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository')
1603 old_srcrev = {'.': old_srcrev}
1604
1511 # Get HEAD revision 1605 # Get HEAD revision
1512 try: 1606 try:
1513 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) 1607 stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
@@ -1534,13 +1628,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1534 if not no_remove: 1628 if not no_remove:
1535 # Find list of existing patches in recipe file 1629 # Find list of existing patches in recipe file
1536 patches_dir = tempfile.mkdtemp(dir=tempdir) 1630 patches_dir = tempfile.mkdtemp(dir=tempdir)
1537 old_srcrev = rd.getVar('SRCREV') or ''
1538 upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, 1631 upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
1539 patches_dir) 1632 patches_dir)
1540 logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) 1633 logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
1541 1634
1542 # Remove deleted local files and "overlapping" patches 1635 # Remove deleted local files and "overlapping" patches
1543 remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) 1636 remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
1544 if remove_files: 1637 if remove_files:
1545 removedentries = _remove_file_entries(srcuri, remove_files)[0] 1638 removedentries = _remove_file_entries(srcuri, remove_files)[0]
1546 update_srcuri = True 1639 update_srcuri = True
@@ -1554,14 +1647,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1554 patchfields['SRC_URI'] = '\\\n '.join(srcuri) 1647 patchfields['SRC_URI'] = '\\\n '.join(srcuri)
1555 if dry_run_outdir: 1648 if dry_run_outdir:
1556 logger.info('Creating bbappend (dry-run)') 1649 logger.info('Creating bbappend (dry-run)')
1557 else: 1650 appendfile, destpath = oe.recipeutils.bbappend_recipe(
1558 appendfile, destpath = oe.recipeutils.bbappend_recipe( 1651 rd, appendlayerdir, files, wildcardver=wildcard_version,
1559 rd, appendlayerdir, files, wildcardver=wildcard_version, 1652 extralines=patchfields, removevalues=removevalues,
1560 extralines=patchfields, removevalues=removevalues, 1653 redirect_output=dry_run_outdir)
1561 redirect_output=dry_run_outdir)
1562 else: 1654 else:
1563 files_dir = _determine_files_dir(rd) 1655 files_dir = _determine_files_dir(rd)
1564 for basepath, path in upd_f.items(): 1656 for basepath, param in upd_f.items():
1657 path = param['path']
1565 logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) 1658 logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
1566 if os.path.isabs(basepath): 1659 if os.path.isabs(basepath):
1567 # Original file (probably with subdir pointing inside source tree) 1660 # Original file (probably with subdir pointing inside source tree)
@@ -1571,7 +1664,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
1571 _move_file(os.path.join(local_files_dir, basepath), path, 1664 _move_file(os.path.join(local_files_dir, basepath), path,
1572 dry_run_outdir=dry_run_outdir, base_outdir=recipedir) 1665 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1573 update_srcuri= True 1666 update_srcuri= True
1574 for basepath, path in new_f.items(): 1667 for basepath, param in new_f.items():
1668 path = param['path']
1575 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) 1669 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
1576 _move_file(os.path.join(local_files_dir, basepath), 1670 _move_file(os.path.join(local_files_dir, basepath),
1577 os.path.join(files_dir, basepath), 1671 os.path.join(files_dir, basepath),
@@ -1603,9 +1697,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1603 if not os.path.exists(append): 1697 if not os.path.exists(append):
1604 raise DevtoolError('unable to find workspace bbappend for recipe %s' % 1698 raise DevtoolError('unable to find workspace bbappend for recipe %s' %
1605 recipename) 1699 recipename)
1700 srctreebase = workspace[recipename]['srctreebase']
1701 relpatchdir = os.path.relpath(srctreebase, srctree)
1702 if relpatchdir == '.':
1703 patchdir_params = {}
1704 else:
1705 patchdir_params = {'patchdir': relpatchdir}
1606 1706
1607 initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) 1707 def srcuri_entry(basepath, patchdir_params):
1608 if not initial_rev: 1708 if patchdir_params:
1709 paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
1710 else:
1711 paramstr = ''
1712 return 'file://%s%s' % (basepath, paramstr)
1713
1714 initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
1715 if not initial_revs:
1609 raise DevtoolError('Unable to find initial revision - please specify ' 1716 raise DevtoolError('Unable to find initial revision - please specify '
1610 'it with --initial-rev') 1717 'it with --initial-rev')
1611 1718
@@ -1619,61 +1726,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1619 tempdir = tempfile.mkdtemp(prefix='devtool') 1726 tempdir = tempfile.mkdtemp(prefix='devtool')
1620 try: 1727 try:
1621 local_files_dir = tempfile.mkdtemp(dir=tempdir) 1728 local_files_dir = tempfile.mkdtemp(dir=tempdir)
1622 if filter_patches: 1729 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1623 upd_f = {}
1624 new_f = {}
1625 del_f = {}
1626 else:
1627 srctreebase = workspace[recipename]['srctreebase']
1628 upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
1629
1630 remove_files = []
1631 if not no_remove:
1632 # Get all patches from source tree and check if any should be removed
1633 all_patches_dir = tempfile.mkdtemp(dir=tempdir)
1634 _, _, del_p = _export_patches(srctree, rd, initial_rev,
1635 all_patches_dir)
1636 # Remove deleted local files and patches
1637 remove_files = list(del_f.values()) + list(del_p.values())
1638 1730
1639 # Get updated patches from source tree 1731 # Get updated patches from source tree
1640 patches_dir = tempfile.mkdtemp(dir=tempdir) 1732 patches_dir = tempfile.mkdtemp(dir=tempdir)
1641 upd_p, new_p, _ = _export_patches(srctree, rd, update_rev, 1733 upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
1642 patches_dir, changed_revs) 1734 patches_dir, changed_revs)
1735 # Get all patches from source tree and check if any should be removed
1736 all_patches_dir = tempfile.mkdtemp(dir=tempdir)
1737 _, _, del_p = _export_patches(srctree, rd, initial_revs,
1738 all_patches_dir)
1643 logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) 1739 logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
1644 if filter_patches: 1740 if filter_patches:
1645 new_p = OrderedDict() 1741 new_p = OrderedDict()
1646 upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) 1742 upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
1647 remove_files = [f for f in remove_files if f in filter_patches] 1743 del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
1744 remove_files = []
1745 if not no_remove:
1746 # Remove deleted local files and patches
1747 remove_files = list(del_f.values()) + list(del_p.values())
1648 updatefiles = False 1748 updatefiles = False
1649 updaterecipe = False 1749 updaterecipe = False
1650 destpath = None 1750 destpath = None
1651 srcuri = (rd.getVar('SRC_URI', False) or '').split() 1751 srcuri = (rd.getVar('SRC_URI', False) or '').split()
1752
1652 if appendlayerdir: 1753 if appendlayerdir:
1653 files = OrderedDict((os.path.join(local_files_dir, key), val) for 1754 files = OrderedDict((os.path.join(local_files_dir, key), val) for
1654 key, val in list(upd_f.items()) + list(new_f.items())) 1755 key, val in list(upd_f.items()) + list(new_f.items()))
1655 files.update(OrderedDict((os.path.join(patches_dir, key), val) for 1756 files.update(OrderedDict((os.path.join(patches_dir, key), val) for
1656 key, val in list(upd_p.items()) + list(new_p.items()))) 1757 key, val in list(upd_p.items()) + list(new_p.items())))
1758
1759 params = []
1760 for file, param in files.items():
1761 patchdir_param = dict(patchdir_params)
1762 patchdir = param.get('patchdir', ".")
1763 if patchdir != "." :
1764 if patchdir_param:
1765 patchdir_param['patchdir'] += patchdir
1766 else:
1767 patchdir_param['patchdir'] = patchdir
1768 params.append(patchdir_param)
1769
1657 if files or remove_files: 1770 if files or remove_files:
1658 removevalues = None 1771 removevalues = None
1659 if remove_files: 1772 if remove_files:
1660 removedentries, remaining = _remove_file_entries( 1773 removedentries, remaining = _remove_file_entries(
1661 srcuri, remove_files) 1774 srcuri, remove_files)
1662 if removedentries or remaining: 1775 if removedentries or remaining:
1663 remaining = ['file://' + os.path.basename(item) for 1776 remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
1664 item in remaining] 1777 item in remaining]
1665 removevalues = {'SRC_URI': removedentries + remaining} 1778 removevalues = {'SRC_URI': removedentries + remaining}
1666 appendfile, destpath = oe.recipeutils.bbappend_recipe( 1779 appendfile, destpath = oe.recipeutils.bbappend_recipe(
1667 rd, appendlayerdir, files, 1780 rd, appendlayerdir, files,
1668 wildcardver=wildcard_version, 1781 wildcardver=wildcard_version,
1669 removevalues=removevalues, 1782 removevalues=removevalues,
1670 redirect_output=dry_run_outdir) 1783 redirect_output=dry_run_outdir,
1784 params=params)
1671 else: 1785 else:
1672 logger.info('No patches or local source files needed updating') 1786 logger.info('No patches or local source files needed updating')
1673 else: 1787 else:
1674 # Update existing files 1788 # Update existing files
1675 files_dir = _determine_files_dir(rd) 1789 files_dir = _determine_files_dir(rd)
1676 for basepath, path in upd_f.items(): 1790 for basepath, param in upd_f.items():
1791 path = param['path']
1677 logger.info('Updating file %s' % basepath) 1792 logger.info('Updating file %s' % basepath)
1678 if os.path.isabs(basepath): 1793 if os.path.isabs(basepath):
1679 # Original file (probably with subdir pointing inside source tree) 1794 # Original file (probably with subdir pointing inside source tree)
@@ -1684,14 +1799,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1684 _move_file(os.path.join(local_files_dir, basepath), path, 1799 _move_file(os.path.join(local_files_dir, basepath), path,
1685 dry_run_outdir=dry_run_outdir, base_outdir=recipedir) 1800 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1686 updatefiles = True 1801 updatefiles = True
1687 for basepath, path in upd_p.items(): 1802 for basepath, param in upd_p.items():
1688 patchfn = os.path.join(patches_dir, basepath) 1803 path = param['path']
1804 patchdir = param.get('patchdir', ".")
1805 if patchdir != "." :
1806 patchdir_param = dict(patchdir_params)
1807 if patchdir_param:
1808 patchdir_param['patchdir'] += patchdir
1809 else:
1810 patchdir_param['patchdir'] = patchdir
1811 patchfn = os.path.join(patches_dir, patchdir, basepath)
1689 if os.path.dirname(path) + '/' == dl_dir: 1812 if os.path.dirname(path) + '/' == dl_dir:
1690 # This is a a downloaded patch file - we now need to 1813 # This is a a downloaded patch file - we now need to
1691 # replace the entry in SRC_URI with our local version 1814 # replace the entry in SRC_URI with our local version
1692 logger.info('Replacing remote patch %s with updated local version' % basepath) 1815 logger.info('Replacing remote patch %s with updated local version' % basepath)
1693 path = os.path.join(files_dir, basepath) 1816 path = os.path.join(files_dir, basepath)
1694 _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) 1817 _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
1695 updaterecipe = True 1818 updaterecipe = True
1696 else: 1819 else:
1697 logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) 1820 logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
@@ -1699,21 +1822,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
1699 dry_run_outdir=dry_run_outdir, base_outdir=recipedir) 1822 dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
1700 updatefiles = True 1823 updatefiles = True
1701 # Add any new files 1824 # Add any new files
1702 for basepath, path in new_f.items(): 1825 for basepath, param in new_f.items():
1703 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) 1826 logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
1704 _move_file(os.path.join(local_files_dir, basepath), 1827 _move_file(os.path.join(local_files_dir, basepath),
1705 os.path.join(files_dir, basepath), 1828 os.path.join(files_dir, basepath),
1706 dry_run_outdir=dry_run_outdir, 1829 dry_run_outdir=dry_run_outdir,
1707 base_outdir=recipedir) 1830 base_outdir=recipedir)
1708 srcuri.append('file://%s' % basepath) 1831 srcuri.append(srcuri_entry(basepath, patchdir_params))
1709 updaterecipe = True 1832 updaterecipe = True
1710 for basepath, path in new_p.items(): 1833 for basepath, param in new_p.items():
1834 patchdir = param.get('patchdir', ".")
1711 logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) 1835 logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
1712 _move_file(os.path.join(patches_dir, basepath), 1836 _move_file(os.path.join(patches_dir, patchdir, basepath),
1713 os.path.join(files_dir, basepath), 1837 os.path.join(files_dir, basepath),
1714 dry_run_outdir=dry_run_outdir, 1838 dry_run_outdir=dry_run_outdir,
1715 base_outdir=recipedir) 1839 base_outdir=recipedir)
1716 srcuri.append('file://%s' % basepath) 1840 params = dict(patchdir_params)
1841 if patchdir != "." :
1842 if params:
1843 params['patchdir'] += patchdir
1844 else:
1845 params['patchdir'] = patchdir
1846
1847 srcuri.append(srcuri_entry(basepath, params))
1717 updaterecipe = True 1848 updaterecipe = True
1718 # Update recipe, if needed 1849 # Update recipe, if needed
1719 if _remove_file_entries(srcuri, remove_files)[0]: 1850 if _remove_file_entries(srcuri, remove_files)[0]:
@@ -1770,6 +1901,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
1770 for line in stdout.splitlines(): 1901 for line in stdout.splitlines():
1771 branchname = line[2:] 1902 branchname = line[2:]
1772 if line.startswith('* '): 1903 if line.startswith('* '):
1904 if 'HEAD' in line:
1905 raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
1773 startbranch = branchname 1906 startbranch = branchname
1774 if branchname.startswith(override_branch_prefix): 1907 if branchname.startswith(override_branch_prefix):
1775 override_branches.append(branchname) 1908 override_branches.append(branchname)
@@ -1959,9 +2092,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
1959 shutil.rmtree(srctreebase) 2092 shutil.rmtree(srctreebase)
1960 else: 2093 else:
1961 # We don't want to risk wiping out any work in progress 2094 # We don't want to risk wiping out any work in progress
1962 logger.info('Leaving source tree %s as-is; if you no ' 2095 if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
1963 'longer need it then please delete it manually' 2096 from datetime import datetime
1964 % srctreebase) 2097 preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
2098 logger.info('Preserving source tree in %s\nIf you no '
2099 'longer need it then please delete it manually.\n'
2100 'It is also possible to reuse it via the devtool source tree argument.'
2101 % preservesrc)
2102 bb.utils.mkdirhier(os.path.dirname(preservesrc))
2103 shutil.move(srctreebase, preservesrc)
2104 else:
2105 logger.info('Leaving source tree %s as-is; if you no '
2106 'longer need it then please delete it manually'
2107 % srctreebase)
1965 else: 2108 else:
1966 # This is unlikely, but if it's empty we can just remove it 2109 # This is unlikely, but if it's empty we can just remove it
1967 os.rmdir(srctreebase) 2110 os.rmdir(srctreebase)
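Instead of leaving abandoned trees under sources/, reset now moves them into a timestamped 'attic' area so repeated resets of the same recipe never collide. A sketch of the path construction, assuming the workspace layout shown above (the diff uses bb.utils.mkdirhier; plain os.makedirs is substituted here):

    import os
    import shutil
    from datetime import datetime

    def preserve_srctree(workspace_path, pn, srctreebase):
        # e.g. <workspace>/attic/sources/busybox.20240101120000
        stamp = datetime.now().strftime('%Y%m%d%H%M%S')
        preservesrc = os.path.join(workspace_path, 'attic', 'sources',
                                   '%s.%s' % (pn, stamp))
        os.makedirs(os.path.dirname(preservesrc), exist_ok=True)
        shutil.move(srctreebase, preservesrc)
        return preservesrc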
@@ -2221,6 +2364,7 @@ def register_commands(subparsers, context):
2221 group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") 2364 group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
2222 parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') 2365 parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
2223 parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") 2366 parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
2367 parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
2224 parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') 2368 parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
2225 parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") 2369 parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
2226 group = parser_add.add_mutually_exclusive_group() 2370 group = parser_add.add_mutually_exclusive_group()
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index 5a057e95f5..fa5b8ef3c7 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -35,6 +35,8 @@ def _get_srctree(tmpdir):
35 dirs = scriptutils.filter_src_subdirs(tmpdir) 35 dirs = scriptutils.filter_src_subdirs(tmpdir)
36 if len(dirs) == 1: 36 if len(dirs) == 1:
37 srctree = os.path.join(tmpdir, dirs[0]) 37 srctree = os.path.join(tmpdir, dirs[0])
38 else:
39 raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, dirs))
38 return srctree 40 return srctree
39 41
40def _copy_source_code(orig, dest): 42def _copy_source_code(orig, dest):
@@ -71,7 +73,8 @@ def _rename_recipe_dirs(oldpv, newpv, path):
71 if oldfile.find(oldpv) != -1: 73 if oldfile.find(oldpv) != -1:
72 newfile = oldfile.replace(oldpv, newpv) 74 newfile = oldfile.replace(oldpv, newpv)
73 if oldfile != newfile: 75 if oldfile != newfile:
74 os.rename(os.path.join(path, oldfile), os.path.join(path, newfile)) 76 bb.utils.rename(os.path.join(path, oldfile),
77 os.path.join(path, newfile))
75 78
76def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): 79def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
77 oldrecipe = os.path.basename(oldrecipe) 80 oldrecipe = os.path.basename(oldrecipe)
@@ -87,7 +90,7 @@ def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
87 _rename_recipe_dirs(oldpv, newpv, path) 90 _rename_recipe_dirs(oldpv, newpv, path)
88 return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) 91 return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
89 92
90def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d): 93def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
91 """Writes an append file""" 94 """Writes an append file"""
92 if not os.path.exists(rc): 95 if not os.path.exists(rc):
93 raise DevtoolError("bbappend not created because %s does not exist" % rc) 96 raise DevtoolError("bbappend not created because %s does not exist" % rc)
@@ -102,36 +105,38 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
102 pn = d.getVar('PN') 105 pn = d.getVar('PN')
103 af = os.path.join(appendpath, '%s.bbappend' % brf) 106 af = os.path.join(appendpath, '%s.bbappend' % brf)
104 with open(af, 'w') as f: 107 with open(af, 'w') as f:
105 f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n') 108 f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
109 # Local files can be modified/tracked in separate subdir under srctree
110 # Mostly useful for packages with S != WORKDIR
111 f.write('FILESPATH:prepend := "%s:"\n' %
112 os.path.join(srctreebase, 'oe-local-files'))
113 f.write('# srctreebase: %s\n' % srctreebase)
106 f.write('inherit externalsrc\n') 114 f.write('inherit externalsrc\n')
107 f.write(('# NOTE: We use pn- overrides here to avoid affecting ' 115 f.write(('# NOTE: We use pn- overrides here to avoid affecting '
108 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) 116 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
109 f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) 117 f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
110 b_is_s = use_external_build(same_dir, no_same_dir, d) 118 b_is_s = use_external_build(same_dir, no_same_dir, d)
111 if b_is_s: 119 if b_is_s:
112 f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) 120 f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
113 f.write('\n') 121 f.write('\n')
114 if rev: 122 if revs:
115 f.write('# initial_rev: %s\n' % rev) 123 for name, rev in revs.items():
124 f.write('# initial_rev %s: %s\n' % (name, rev))
116 if copied: 125 if copied:
117 f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) 126 f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
118 f.write('# original_files: %s\n' % ' '.join(copied)) 127 f.write('# original_files: %s\n' % ' '.join(copied))
119 return af 128 return af
120 129
121def _cleanup_on_error(rf, srctree): 130def _cleanup_on_error(rd, srctree):
122 rfp = os.path.split(rf)[0] # recipe folder 131 if os.path.exists(rd):
123 rfpp = os.path.split(rfp)[0] # recipes folder 132 shutil.rmtree(rd)
124 if os.path.exists(rfp):
125 shutil.rmtree(rfp)
126 if not len(os.listdir(rfpp)):
127 os.rmdir(rfpp)
128 srctree = os.path.abspath(srctree) 133 srctree = os.path.abspath(srctree)
129 if os.path.exists(srctree): 134 if os.path.exists(srctree):
130 shutil.rmtree(srctree) 135 shutil.rmtree(srctree)
131 136
132def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None): 137def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
133 if rf and not keep_failure: 138 if not keep_failure:
134 _cleanup_on_error(rf, srctree) 139 _cleanup_on_error(rd, srctree)
135 logger.error(e) 140 logger.error(e)
136 if extramsg: 141 if extramsg:
137 logger.error(extramsg) 142 logger.error(extramsg)
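With revs now a dict keyed by subtree path, _write_append() emits one initial_rev marker per tree. For a recipe with a single submodule the generated .bbappend would look roughly like this (paths and hashes are purely illustrative):

    FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"

    FILESPATH:prepend := "/work/sources/foo/oe-local-files:"
    # srctreebase: /work/sources/foo
    inherit externalsrc
    EXTERNALSRC:pn-foo = "/work/sources/foo"

    # initial_rev .: 0123456789abcdef0123456789abcdef01234567
    # initial_rev libs/bar: 89abcdef0123456789abcdef0123456789abcdef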
@@ -178,12 +183,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
178 uri, rev = _get_uri(crd) 183 uri, rev = _get_uri(crd)
179 if srcrev: 184 if srcrev:
180 rev = srcrev 185 rev = srcrev
186 paths = [srctree]
181 if uri.startswith('git://') or uri.startswith('gitsm://'): 187 if uri.startswith('git://') or uri.startswith('gitsm://'):
182 __run('git fetch') 188 __run('git fetch')
183 __run('git checkout %s' % rev) 189 __run('git checkout %s' % rev)
184 __run('git tag -f devtool-base-new') 190 __run('git tag -f devtool-base-new')
185 md5 = None 191 __run('git submodule update --recursive')
186 sha256 = None 192 __run('git submodule foreach \'git tag -f devtool-base-new\'')
193 (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
194 paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
195 checksums = {}
187 _, _, _, _, _, params = bb.fetch2.decodeurl(uri) 196 _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
188 srcsubdir_rel = params.get('destsuffix', 'git') 197 srcsubdir_rel = params.get('destsuffix', 'git')
189 if not srcbranch: 198 if not srcbranch:
@@ -191,14 +200,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
191 get_branch = [x.strip() for x in check_branch.splitlines()] 200 get_branch = [x.strip() for x in check_branch.splitlines()]
192 # Remove HEAD reference point and drop remote prefix 201 # Remove HEAD reference point and drop remote prefix
193 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 202 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
194 if 'master' in get_branch: 203 if len(get_branch) == 1:
195 # If it is master, we do not need to append 'branch=master' as this is default. 204 # If srcrev is on only ONE branch, then use that branch
196 # Even with the case where get_branch has multiple objects, if 'master' is one
197 # of them, we should default take from 'master'
198 srcbranch = ''
199 elif len(get_branch) == 1:
200 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
201 srcbranch = get_branch[0] 205 srcbranch = get_branch[0]
206 elif 'main' in get_branch:
207 # If srcrev is on multiple branches, then choose 'main' if it is one of them
208 srcbranch = 'main'
209 elif 'master' in get_branch:
210 # Otherwise choose 'master' if it is one of the branches
211 srcbranch = 'master'
202 else: 212 else:
203 # If get_branch contains more than one object, then display an error and exit. 213 # If get_branch contains more than one object, then display an error and exit.
204 mbrch = '\n ' + '\n '.join(get_branch) 214 mbrch = '\n ' + '\n '.join(get_branch)
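The reworked branch detection now prefers the unambiguous case and only then falls back to 'main' and 'master'. Condensed into a standalone sketch, assuming get_branch is the cleaned list of remote branches containing srcrev:

    def pick_srcbranch(get_branch):
        if len(get_branch) == 1:
            return get_branch[0]      # srcrev is on exactly one branch
        for preferred in ('main', 'master'):
            if preferred in get_branch:
                return preferred      # ambiguous; fall back to a conventional default
        raise ValueError('SRCREV is on multiple branches: %s' % ', '.join(get_branch))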
@@ -215,9 +225,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
215 if ftmpdir and keep_temp: 225 if ftmpdir and keep_temp:
216 logger.info('Fetch temp directory is %s' % ftmpdir) 226 logger.info('Fetch temp directory is %s' % ftmpdir)
217 227
218 md5 = checksums['md5sum']
219 sha256 = checksums['sha256sum']
220
221 tmpsrctree = _get_srctree(tmpdir) 228 tmpsrctree = _get_srctree(tmpdir)
222 srctree = os.path.abspath(srctree) 229 srctree = os.path.abspath(srctree)
223 srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) 230 srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)
@@ -251,30 +258,50 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
251 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) 258 __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
252 __run('git tag -f devtool-base-%s' % newpv) 259 __run('git tag -f devtool-base-%s' % newpv)
253 260
254 (stdout, _) = __run('git rev-parse HEAD') 261 revs = {}
255 rev = stdout.rstrip() 262 for path in paths:
263 (stdout, _) = _run('git rev-parse HEAD', cwd=path)
264 revs[os.path.relpath(path, srctree)] = stdout.rstrip()
256 265
257 if no_patch: 266 if no_patch:
258 patches = oe.recipeutils.get_recipe_patches(crd) 267 patches = oe.recipeutils.get_recipe_patches(crd)
259 if patches: 268 if patches:
260 logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) 269 logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
261 else: 270 else:
262 __run('git checkout devtool-patched -b %s' % branch) 271 for path in paths:
263 skiptag = False 272 _run('git checkout devtool-patched -b %s' % branch, cwd=path)
264 try: 273 (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
265 __run('git rebase %s' % rev) 274 branches_to_rebase = [branch] + stdout.split()
266 except bb.process.ExecutionError as e: 275 target_branch = revs[os.path.relpath(path, srctree)]
267 skiptag = True 276
268 if 'conflict' in e.stdout: 277 # There is a bug (or feature?) in git rebase where if a commit with
269 logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) 278 # a note is fully rebased away by being part of an old commit, the
270 else: 279 # note is still attached to the old commit. Avoid this by making
271 logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) 280 # sure all old devtool related commits have a note attached to them
272 if not skiptag: 281 # (this assumes git config notes.rewriteMode is set to ignore).
273 if uri.startswith('git://') or uri.startswith('gitsm://'): 282 (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
274 suffix = 'new' 283 for rev in stdout.splitlines():
275 else: 284 if not oe.patch.GitApplyTree.getNotes(path, rev):
276 suffix = newpv 285 oe.patch.GitApplyTree.addNote(path, rev, "dummy")
277 __run('git tag -f devtool-patched-%s' % suffix) 286
287 for b in branches_to_rebase:
288 logger.info("Rebasing {} onto {}".format(b, target_branch))
289 _run('git checkout %s' % b, cwd=path)
290 try:
291 _run('git rebase %s' % target_branch, cwd=path)
292 except bb.process.ExecutionError as e:
293 if 'conflict' in e.stdout:
294 logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
295 _run('git rebase --abort', cwd=path)
296 else:
297 logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
298
299 # Remove any dummy notes added above.
300 (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
301 for rev in stdout.splitlines():
302 oe.patch.GitApplyTree.removeNote(path, rev, "dummy")
303
304 _run('git checkout %s' % branch, cwd=path)
278 305
279 if tmpsrctree: 306 if tmpsrctree:
280 if keep_temp: 307 if keep_temp:
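The dummy-note dance above works around git rebase leaving notes attached to the pre-rebase commits when notes.rewriteMode is set to ignore. The oe.patch.GitApplyTree helpers wrap ordinary git-notes plumbing; the same pattern in plain git, as a sketch:

    import subprocess

    def git_out(args, cwd):
        return subprocess.run(['git'] + args, cwd=cwd, capture_output=True,
                              text=True, check=False).stdout

    def pin_dummy_notes(path, base, tip):
        # Give every note-less commit in base..tip a throwaway note so the
        # rebase cannot migrate a real note onto a rewritten commit
        revs = git_out(['rev-list', '%s..%s' % (base, tip)], path).splitlines()
        for rev in revs:
            if not git_out(['notes', 'list', rev], path).strip():
                subprocess.run(['git', 'notes', 'add', '-m', 'dummy', rev],
                               cwd=path, check=True)
        return revs

The matching cleanup step simply removes the 'dummy' notes again once all branches have been rebased, as the hunk does.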
@@ -284,7 +311,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
284 if tmpdir != tmpsrctree: 311 if tmpdir != tmpsrctree:
285 shutil.rmtree(tmpdir) 312 shutil.rmtree(tmpdir)
286 313
287 return (rev, md5, sha256, srcbranch, srcsubdir_rel) 314 return (revs, checksums, srcbranch, srcsubdir_rel)
288 315
289def _add_license_diff_to_recipe(path, diff): 316def _add_license_diff_to_recipe(path, diff):
290 notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. 317 notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
@@ -305,7 +332,7 @@ def _add_license_diff_to_recipe(path, diff):
305 f.write("\n#\n\n".encode()) 332 f.write("\n#\n\n".encode())
306 f.write(orig_content) 333 f.write(orig_content)
307 334
308def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): 335def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
309 """Creates the new recipe under workspace""" 336 """Creates the new recipe under workspace"""
310 337
311 bpn = rd.getVar('BPN') 338 bpn = rd.getVar('BPN')
@@ -336,7 +363,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
336 replacing = True 363 replacing = True
337 new_src_uri = [] 364 new_src_uri = []
338 for entry in src_uri: 365 for entry in src_uri:
339 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) 366 try:
367 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
368 except bb.fetch2.MalformedUrl as e:
369 raise DevtoolError("Could not decode SRC_URI: {}".format(e))
340 if replacing and scheme in ['git', 'gitsm']: 370 if replacing and scheme in ['git', 'gitsm']:
341 branch = params.get('branch', 'master') 371 branch = params.get('branch', 'master')
342 if rd.expand(branch) != srcbranch: 372 if rd.expand(branch) != srcbranch:
@@ -374,30 +404,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
374 addnames.append(params['name']) 404 addnames.append(params['name'])
375 # Find what's been set in the original recipe 405 # Find what's been set in the original recipe
376 oldnames = [] 406 oldnames = []
407 oldsums = []
377 noname = False 408 noname = False
378 for varflag in rd.getVarFlags('SRC_URI'): 409 for varflag in rd.getVarFlags('SRC_URI'):
379 if varflag.endswith(('.md5sum', '.sha256sum')): 410 for checksum in checksums:
380 name = varflag.rsplit('.', 1)[0] 411 if varflag.endswith('.' + checksum):
381 if name not in oldnames: 412 name = varflag.rsplit('.', 1)[0]
382 oldnames.append(name) 413 if name not in oldnames:
383 elif varflag in ['md5sum', 'sha256sum']: 414 oldnames.append(name)
384 noname = True 415 oldsums.append(checksum)
416 elif varflag == checksum:
417 noname = True
418 oldsums.append(checksum)
385 # Even if SRC_URI has named entries it doesn't have to actually use the name 419 # Even if SRC_URI has named entries it doesn't have to actually use the name
386 if noname and addnames and addnames[0] not in oldnames: 420 if noname and addnames and addnames[0] not in oldnames:
387 addnames = [] 421 addnames = []
388 # Drop any old names (the name actually might include ${PV}) 422 # Drop any old names (the name actually might include ${PV})
389 for name in oldnames: 423 for name in oldnames:
390 if name not in newnames: 424 if name not in newnames:
391 newvalues['SRC_URI[%s.md5sum]' % name] = None 425 for checksum in oldsums:
392 newvalues['SRC_URI[%s.sha256sum]' % name] = None 426 newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
393 427
394 if sha256: 428 nameprefix = '%s.' % addnames[0] if addnames else ''
395 if addnames: 429
396 nameprefix = '%s.' % addnames[0] 430 # md5sum is deprecated, remove any traces of it. If it was the only old
397 else: 431 # checksum, then replace it with the default checksums.
398 nameprefix = '' 432 if 'md5sum' in oldsums:
399 newvalues['SRC_URI[%smd5sum]' % nameprefix] = None 433 newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
400 newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256 434 oldsums.remove('md5sum')
435 if not oldsums:
436 oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
437
438 for checksum in oldsums:
439 newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
401 440
402 if srcsubdir_new != srcsubdir_old: 441 if srcsubdir_new != srcsubdir_old:
403 s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) 442 s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
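The net effect of the checksum handling above, traced through two common cases (values illustrative):

    # Recipe had SRC_URI[md5sum] and SRC_URI[sha256sum]:
    #   SRC_URI[md5sum]    -> None                      (md5sum is deprecated)
    #   SRC_URI[sha256sum] -> checksums['sha256sum']    (refreshed for the new source)
    #
    # Recipe had only SRC_URI[md5sum]:
    #   md5sum is deleted and, with no other checksum left, the default set
    #   derived from bb.fetch2.SHOWN_CHECKSUM_LIST is written instead.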
@@ -422,10 +461,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
422 newvalues["LIC_FILES_CHKSUM"] = newlicchksum 461 newvalues["LIC_FILES_CHKSUM"] = newlicchksum
423 _add_license_diff_to_recipe(fullpath, license_diff) 462 _add_license_diff_to_recipe(fullpath, license_diff)
424 463
464 tinfoil.modified_files()
425 try: 465 try:
426 rd = tinfoil.parse_recipe_file(fullpath, False) 466 rd = tinfoil.parse_recipe_file(fullpath, False)
427 except bb.tinfoil.TinfoilCommandFailed as e: 467 except bb.tinfoil.TinfoilCommandFailed as e:
428 _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed') 468 _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
429 oe.recipeutils.patch_recipe(rd, fullpath, newvalues) 469 oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
430 470
431 return fullpath, copied 471 return fullpath, copied
@@ -434,7 +474,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
434def _check_git_config(): 474def _check_git_config():
435 def getconfig(name): 475 def getconfig(name):
436 try: 476 try:
437 value = bb.process.run('git config --global %s' % name)[0].strip() 477 value = bb.process.run('git config %s' % name)[0].strip()
438 except bb.process.ExecutionError as e: 478 except bb.process.ExecutionError as e:
439 if e.exitcode == 1: 479 if e.exitcode == 1:
440 value = None 480 value = None
@@ -521,6 +561,8 @@ def upgrade(args, config, basepath, workspace):
521 else: 561 else:
522 srctree = standard.get_default_srctree(config, pn) 562 srctree = standard.get_default_srctree(config, pn)
523 563
564 srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
565
524 # try to automatically discover latest version and revision if not provided on command line 566 # try to automatically discover latest version and revision if not provided on command line
525 if not args.version and not args.srcrev: 567 if not args.version and not args.srcrev:
526 version_info = oe.recipeutils.get_recipe_upstream_version(rd) 568 version_info = oe.recipeutils.get_recipe_upstream_version(rd)
@@ -550,21 +592,20 @@ def upgrade(args, config, basepath, workspace):
550 try: 592 try:
551 logger.info('Extracting current version source...') 593 logger.info('Extracting current version source...')
552 rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) 594 rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
553 old_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or "")) 595 old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
554 logger.info('Extracting upgraded version source...') 596 logger.info('Extracting upgraded version source...')
555 rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, 597 rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
556 args.srcrev, args.srcbranch, args.branch, args.keep_temp, 598 args.srcrev, args.srcbranch, args.branch, args.keep_temp,
557 tinfoil, rd) 599 tinfoil, rd)
558 new_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or "")) 600 new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
559 license_diff = _generate_license_diff(old_licenses, new_licenses) 601 license_diff = _generate_license_diff(old_licenses, new_licenses)
560 rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) 602 rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
561 except bb.process.CmdError as e: 603 except (bb.process.CmdError, DevtoolError) as e:
562 _upgrade_error(e, rf, srctree, args.keep_failure) 604 recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
563 except DevtoolError as e: 605 _upgrade_error(e, recipedir, srctree, args.keep_failure)
564 _upgrade_error(e, rf, srctree, args.keep_failure)
565 standard._add_md5(config, pn, os.path.dirname(rf)) 606 standard._add_md5(config, pn, os.path.dirname(rf))
566 607
567 af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2, 608 af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
568 copied, config.workspace_path, rd) 609 copied, config.workspace_path, rd)
569 standard._add_md5(config, pn, af) 610 standard._add_md5(config, pn, af)
570 611
@@ -574,6 +615,9 @@ def upgrade(args, config, basepath, workspace):
574 logger.info('New recipe is %s' % rf) 615 logger.info('New recipe is %s' % rf)
575 if license_diff: 616 if license_diff:
576 logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.') 617 logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
618 preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
619 if preferred_version:
620 logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
577 finally: 621 finally:
578 tinfoil.shutdown() 622 tinfoil.shutdown()
579 return 0 623 return 0
@@ -605,7 +649,7 @@ def check_upgrade_status(args, config, basepath, workspace):
605 for result in results: 649 for result in results:
606 # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason 650 # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
607 if args.all or result[1] != 'MATCH': 651 if args.all or result[1] != 'MATCH':
608 logger.info("{:25} {:15} {:15} {} {} {}".format( result[0], 652 print("{:25} {:15} {:15} {} {} {}".format( result[0],
609 result[2], 653 result[2],
610 result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), 654 result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
611 result[4], 655 result[4],
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index e9d52bb67b..341e893305 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -18,6 +18,7 @@ import shutil
18import scriptutils 18import scriptutils
19import errno 19import errno
20from collections import defaultdict 20from collections import defaultdict
21import difflib
21 22
22logger = logging.getLogger('recipetool') 23logger = logging.getLogger('recipetool')
23 24
@@ -49,7 +50,7 @@ def find_target_file(targetpath, d, pkglist=None):
49 '/etc/group': '/etc/group should be managed through the useradd and extrausers classes', 50 '/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
50 '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes', 51 '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
51 '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes', 52 '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
52 '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname_pn-base-files = "value" in configuration',} 53 '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}
53 54
54 for pthspec, message in invalidtargets.items(): 55 for pthspec, message in invalidtargets.items():
55 if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)): 56 if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
@@ -72,15 +73,15 @@ def find_target_file(targetpath, d, pkglist=None):
72 # This does assume that PN comes before other values, but that's a fairly safe assumption 73 # This does assume that PN comes before other values, but that's a fairly safe assumption
73 for line in f: 74 for line in f:
74 if line.startswith('PN:'): 75 if line.startswith('PN:'):
75 pn = line.split(':', 1)[1].strip() 76 pn = line.split(': ', 1)[1].strip()
76 elif line.startswith('FILES_INFO:'): 77 elif line.startswith('FILES_INFO'):
77 val = line.split(':', 1)[1].strip() 78 val = line.split(': ', 1)[1].strip()
78 dictval = json.loads(val) 79 dictval = json.loads(val)
79 for fullpth in dictval.keys(): 80 for fullpth in dictval.keys():
80 if fnmatch.fnmatchcase(fullpth, targetpath): 81 if fnmatch.fnmatchcase(fullpth, targetpath):
81 recipes[targetpath].append(pn) 82 recipes[targetpath].append(pn)
82 elif line.startswith('pkg_preinst_') or line.startswith('pkg_postinst_'): 83 elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
83 scriptval = line.split(':', 1)[1].strip().encode('utf-8').decode('unicode_escape') 84 scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
84 if 'update-alternatives --install %s ' % targetpath in scriptval: 85 if 'update-alternatives --install %s ' % targetpath in scriptval:
85 recipes[targetpath].append('?%s' % pn) 86 recipes[targetpath].append('?%s' % pn)
86 elif targetpath_re.search(scriptval): 87 elif targetpath_re.search(scriptval):
@@ -299,7 +300,10 @@ def appendfile(args):
299 if st.st_mode & stat.S_IXUSR: 300 if st.st_mode & stat.S_IXUSR:
300 perms = '0755' 301 perms = '0755'
301 install = {args.newfile: (args.targetpath, perms)} 302 install = {args.newfile: (args.targetpath, perms)}
302 oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine) 303 if sourcepath:
304 sourcepath = os.path.basename(sourcepath)
305 oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname': sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
306 tinfoil.modified_files()
303 return 0 307 return 0
304 else: 308 else:
305 if alternative_pns: 309 if alternative_pns:
@@ -327,6 +331,7 @@ def appendsrc(args, files, rd, extralines=None):
327 331
328 copyfiles = {} 332 copyfiles = {}
329 extralines = extralines or [] 333 extralines = extralines or []
334 params = []
330 for newfile, srcfile in files.items(): 335 for newfile, srcfile in files.items():
331 src_destdir = os.path.dirname(srcfile) 336 src_destdir = os.path.dirname(srcfile)
332 if not args.use_workdir: 337 if not args.use_workdir:
@@ -337,25 +342,46 @@ def appendsrc(args, files, rd, extralines=None):
337 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) 342 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
338 src_destdir = os.path.normpath(src_destdir) 343 src_destdir = os.path.normpath(src_destdir)
339 344
340 source_uri = 'file://{0}'.format(os.path.basename(srcfile))
341 if src_destdir and src_destdir != '.': 345 if src_destdir and src_destdir != '.':
342 source_uri += ';subdir={0}'.format(src_destdir) 346 params.append({'subdir': src_destdir})
343
344 simple = bb.fetch.URI(source_uri)
345 simple.params = {}
346 simple_str = str(simple)
347 if simple_str in simplified:
348 existing = simplified[simple_str]
349 if source_uri != existing:
350 logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
351 else:
352 logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
353 else: 347 else:
354 extralines.append('SRC_URI += {0}'.format(source_uri)) 348 params.append({})
355 copyfiles[newfile] = srcfile 349
356 350 copyfiles[newfile] = {'newname': os.path.basename(srcfile)}
357 oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines) 351
358 352 dry_run_output = None
353 dry_run_outdir = None
354 if args.dry_run:
355 import tempfile
356 dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
357 dry_run_outdir = dry_run_output.name
358
359 appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
360 redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
361 if not appendfile:
362 return
363 if args.dry_run:
364 output = ''
365 appendfilename = os.path.basename(appendfile)
366 newappendfile = appendfile
367 if appendfile and os.path.exists(appendfile):
368 with open(appendfile, 'r') as f:
369 oldlines = f.readlines()
370 else:
371 appendfile = '/dev/null'
372 oldlines = []
373
374 with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
375 newlines = f.readlines()
376 diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
377 difflines = list(diff)
378 if difflines:
379 output += ''.join(difflines)
380 if output:
381 logger.info('Diff of changed files:\n%s' % output)
382 else:
383 logger.info('No changed files')
384 tinfoil.modified_files()
359 385
360def appendsrcfiles(parser, args): 386def appendsrcfiles(parser, args):
361 recipedata = _parse_recipe(args.recipe, tinfoil) 387 recipedata = _parse_recipe(args.recipe, tinfoil)
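In dry-run mode the would-be bbappend is written into a temporary directory and diffed against the existing file (or an empty baseline when none exists). The comparison at the heart of it, as a standalone sketch:

    import difflib

    def append_diff(oldpath, oldlines, newpath, newlines):
        # unified_diff yields nothing when old and new content match
        difflines = list(difflib.unified_diff(oldlines, newlines, oldpath, newpath))
        return ''.join(difflines) if difflines else None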
@@ -435,6 +461,8 @@ def register_commands(subparsers):
435 help='Create/update a bbappend to add or replace source files', 461 help='Create/update a bbappend to add or replace source files',
436 description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so you cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.') 462 description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so you cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
437 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path) 463 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
464 parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
465 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
438 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path) 466 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
439 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True) 467 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
440 468
@@ -442,6 +470,8 @@ def register_commands(subparsers):
442 parents=[common_src], 470 parents=[common_src],
443 help='Create/update a bbappend to add or replace a source file', 471 help='Create/update a bbappend to add or replace a source file',
444 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.') 472 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
473 parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
474 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
445 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path) 475 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
446 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path) 476 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
447 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True) 477 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 566c75369a..8e9ff38db6 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -115,8 +115,8 @@ class RecipeHandler(object):
115 for line in f: 115 for line in f:
116 if line.startswith('PN:'): 116 if line.startswith('PN:'):
117 pn = line.split(':', 1)[-1].strip() 117 pn = line.split(':', 1)[-1].strip()
118 elif line.startswith('FILES_INFO:'): 118 elif line.startswith('FILES_INFO:%s:' % pkg):
119 val = line.split(':', 1)[1].strip() 119 val = line.split(': ', 1)[1].strip()
120 dictval = json.loads(val) 120 dictval = json.loads(val)
121 for fullpth in sorted(dictval): 121 for fullpth in sorted(dictval):
122 if fullpth.startswith(includedir) and fullpth.endswith('.h'): 122 if fullpth.startswith(includedir) and fullpth.endswith('.h'):
@@ -366,7 +366,7 @@ def supports_srcrev(uri):
366def reformat_git_uri(uri): 366def reformat_git_uri(uri):
367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]''' 367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
368 checkuri = uri.split(';', 1)[0] 368 checkuri = uri.split(';', 1)[0]
369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://github.com/[^/]+/[^/]+/?$', checkuri): 369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri):
370 # Appends scheme if the scheme is missing 370 # Appends scheme if the scheme is missing
371 if not '://' in uri: 371 if not '://' in uri:
372 uri = 'git://' + uri 372 uri = 'git://' + uri
@@ -423,6 +423,36 @@ def create_recipe(args):
423 storeTagName = '' 423 storeTagName = ''
424 pv_srcpv = False 424 pv_srcpv = False
425 425
426 handled = []
427 classes = []
428
429 # Find all plugins that want to register handlers
430 logger.debug('Loading recipe handlers')
431 raw_handlers = []
432 for plugin in plugins:
433 if hasattr(plugin, 'register_recipe_handlers'):
434 plugin.register_recipe_handlers(raw_handlers)
435 # Sort handlers by priority
436 handlers = []
437 for i, handler in enumerate(raw_handlers):
438 if isinstance(handler, tuple):
439 handlers.append((handler[0], handler[1], i))
440 else:
441 handlers.append((handler, 0, i))
442 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
443 for handler, priority, _ in handlers:
444 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
445 setattr(handler, '_devtool', args.devtool)
446 handlers = [item[0] for item in handlers]
447
448 fetchuri = None
449 for handler in handlers:
450 if hasattr(handler, 'process_url'):
451 ret = handler.process_url(args, classes, handled, extravalues)
452 if 'url' in handled and ret:
453 fetchuri = ret
454 break
455
426 if os.path.isfile(source): 456 if os.path.isfile(source):
427 source = 'file://%s' % os.path.abspath(source) 457 source = 'file://%s' % os.path.abspath(source)
428 458
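Hoisting handler registration above URL processing lets a plugin rewrite the fetch URI via process_url() before anything is downloaded. The sort keeps higher priorities first and, within a priority, earlier-registered handlers first; a compact demonstration with placeholder handlers:

    raw_handlers = ['a', ('b', 5), 'c', ('d', 5)]

    handlers = []
    for i, handler in enumerate(raw_handlers):
        if isinstance(handler, tuple):
            handlers.append((handler[0], handler[1], i))
        else:
            handlers.append((handler, 0, i))
    # reverse=True with a negated index: descending priority, ties broken
    # in favour of the earlier registration
    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
    print([h[0] for h in handlers])  # ['b', 'd', 'a', 'c']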
@@ -431,11 +461,12 @@ def create_recipe(args):
431 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): 461 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
432 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') 462 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
433 # Fetch a URL 463 # Fetch a URL
434 fetchuri = reformat_git_uri(urldefrag(source)[0]) 464 if not fetchuri:
465 fetchuri = reformat_git_uri(urldefrag(source)[0])
435 if args.binary: 466 if args.binary:
436 # Assume the archive contains the directory structure verbatim 467 # Assume the archive contains the directory structure verbatim
437 # so we need to extract to a subdirectory 468 # so we need to extract to a subdirectory
438 fetchuri += ';subdir=${BP}' 469 fetchuri += ';subdir=${BPN}'
439 srcuri = fetchuri 470 srcuri = fetchuri
440 rev_re = re.compile(';rev=([^;]+)') 471 rev_re = re.compile(';rev=([^;]+)')
441 res = rev_re.search(srcuri) 472 res = rev_re.search(srcuri)
@@ -478,6 +509,9 @@ def create_recipe(args):
478 storeTagName = params['tag'] 509 storeTagName = params['tag']
479 params['nobranch'] = '1' 510 params['nobranch'] = '1'
480 del params['tag'] 511 del params['tag']
512 # Assume 'master' branch if not set
513 if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
514 params['branch'] = 'master'
481 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) 515 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
482 516
483 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') 517 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
@@ -527,10 +561,9 @@ def create_recipe(args):
527 # Remove HEAD reference point and drop remote prefix 561 # Remove HEAD reference point and drop remote prefix
528 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 562 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
529 if 'master' in get_branch: 563 if 'master' in get_branch:
530 # If it is master, we do not need to append 'branch=master' as this is default.
531 # Even with the case where get_branch has multiple objects, if 'master' is one 564 # Even with the case where get_branch has multiple objects, if 'master' is one
532 # of them, we should take from 'master' by default 565 # of them, we should take from 'master' by default
533 srcbranch = '' 566 srcbranch = 'master'
534 elif len(get_branch) == 1: 567 elif len(get_branch) == 1:
535 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' 568 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
536 srcbranch = get_branch[0] 569 srcbranch = get_branch[0]
@@ -543,8 +576,8 @@ def create_recipe(args):
543 # Since we might have a value in srcbranch, we need to 576 # Since we might have a value in srcbranch, we need to
544 # reconstruct the srcuri to include 'branch' in params. 577 # reconstruct the srcuri to include 'branch' in params.
545 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) 578 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
546 if srcbranch: 579 if scheme in ['git', 'gitsm']:
547 params['branch'] = srcbranch 580 params['branch'] = srcbranch or 'master'
548 581
549 if storeTagName and scheme in ['git', 'gitsm']: 582 if storeTagName and scheme in ['git', 'gitsm']:
550 # Check srcrev using tag and check validity of the tag 583 # Check srcrev using tag and check validity of the tag
@@ -603,7 +636,7 @@ def create_recipe(args):
603 splitline = line.split() 636 splitline = line.split()
604 if len(splitline) > 1: 637 if len(splitline) > 1:
605 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 638 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
606 srcuri = reformat_git_uri(splitline[1]) 639 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
607 srcsubdir = 'git' 640 srcsubdir = 'git'
608 break 641 break
609 642
@@ -636,8 +669,6 @@ def create_recipe(args):
636 # We'll come back and replace this later in handle_license_vars() 669 # We'll come back and replace this later in handle_license_vars()
637 lines_before.append('##LICENSE_PLACEHOLDER##') 670 lines_before.append('##LICENSE_PLACEHOLDER##')
638 671
639 handled = []
640 classes = []
641 672
642 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this 673 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
643 pn = None 674 pn = None
@@ -675,8 +706,10 @@ def create_recipe(args):
675 if not srcuri: 706 if not srcuri:
676 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') 707 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
677 lines_before.append('SRC_URI = "%s"' % srcuri) 708 lines_before.append('SRC_URI = "%s"' % srcuri)
709 shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
678 for key, value in sorted(checksums.items()): 710 for key, value in sorted(checksums.items()):
679 lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) 711 if key in shown_checksums:
712 lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
680 if srcuri and supports_srcrev(srcuri): 713 if srcuri and supports_srcrev(srcuri):
681 lines_before.append('') 714 lines_before.append('')
682 lines_before.append('# Modify these as desired') 715 lines_before.append('# Modify these as desired')
@@ -688,7 +721,7 @@ def create_recipe(args):
688 srcpvprefix = 'svnr' 721 srcpvprefix = 'svnr'
689 else: 722 else:
690 srcpvprefix = scheme 723 srcpvprefix = scheme
691 lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix)) 724 lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
692 pv_srcpv = True 725 pv_srcpv = True
693 if not args.autorev and srcrev == '${AUTOREV}': 726 if not args.autorev and srcrev == '${AUTOREV}':
694 if os.path.exists(os.path.join(srctree, '.git')): 727 if os.path.exists(os.path.join(srctree, '.git')):
@@ -710,31 +743,12 @@ def create_recipe(args):
710 lines_after.append('') 743 lines_after.append('')
711 744
712 if args.binary: 745 if args.binary:
713 lines_after.append('INSANE_SKIP_${PN} += "already-stripped"') 746 lines_after.append('INSANE_SKIP:${PN} += "already-stripped"')
714 lines_after.append('') 747 lines_after.append('')
715 748
716 if args.npm_dev: 749 if args.npm_dev:
717 extravalues['NPM_INSTALL_DEV'] = 1 750 extravalues['NPM_INSTALL_DEV'] = 1
718 751
719 # Find all plugins that want to register handlers
720 logger.debug('Loading recipe handlers')
721 raw_handlers = []
722 for plugin in plugins:
723 if hasattr(plugin, 'register_recipe_handlers'):
724 plugin.register_recipe_handlers(raw_handlers)
725 # Sort handlers by priority
726 handlers = []
727 for i, handler in enumerate(raw_handlers):
728 if isinstance(handler, tuple):
729 handlers.append((handler[0], handler[1], i))
730 else:
731 handlers.append((handler, 0, i))
732 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
733 for handler, priority, _ in handlers:
734 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
735 setattr(handler, '_devtool', args.devtool)
736 handlers = [item[0] for item in handlers]
737
738 # Apply the handlers 752 # Apply the handlers
739 if args.binary: 753 if args.binary:
740 classes.append('bin_package') 754 classes.append('bin_package')
@@ -743,6 +757,10 @@ def create_recipe(args):
743 for handler in handlers: 757 for handler in handlers:
744 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) 758 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
745 759
760 # native and nativesdk classes are special and must be inherited last
761 # If present, put them at the end of the classes list
762 classes.sort(key=lambda c: c in ("native", "nativesdk"))
763
746 extrafiles = extravalues.pop('extrafiles', {}) 764 extrafiles = extravalues.pop('extrafiles', {})
747 extra_pn = extravalues.pop('PN', None) 765 extra_pn = extravalues.pop('PN', None)
748 extra_pv = extravalues.pop('PV', None) 766 extra_pv = extravalues.pop('PV', None)
@@ -867,8 +885,10 @@ def create_recipe(args):
867 outlines.append('') 885 outlines.append('')
868 outlines.extend(lines_after) 886 outlines.extend(lines_after)
869 887
888 outlines = [line.rstrip('\n') + "\n" for line in outlines]
889
870 if extravalues: 890 if extravalues:
871 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False) 891 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
872 892
873 if args.extract_to: 893 if args.extract_to:
874 scriptutils.git_convert_standalone_clone(srctree) 894 scriptutils.git_convert_standalone_clone(srctree)
@@ -884,7 +904,7 @@ def create_recipe(args):
884 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) 904 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
885 905
886 if outfile == '-': 906 if outfile == '-':
887 sys.stdout.write('\n'.join(outlines) + '\n') 907 sys.stdout.write(''.join(outlines) + '\n')
888 else: 908 else:
889 with open(outfile, 'w') as f: 909 with open(outfile, 'w') as f:
890 lastline = None 910 lastline = None
@@ -892,9 +912,10 @@ def create_recipe(args):
892 if not lastline and not line: 912 if not lastline and not line:
893 # Skip extra blank lines 913 # Skip extra blank lines
894 continue 914 continue
895 f.write('%s\n' % line) 915 f.write('%s' % line)
896 lastline = line 916 lastline = line
897 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files()
898 919
899 if tempsrc: 920 if tempsrc:
900 if args.keep_temp: 921 if args.keep_temp:
@@ -917,6 +938,22 @@ def split_value(value):
917 else: 938 else:
918 return value 939 return value
919 940
941def fixup_license(value):
942 # Ensure license expressions containing OR start and end with brackets
943 if '|' in value:
944 return '(' + value + ')'
945 return value
946
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
920def handle_license_vars(srctree, lines_before, handled, extravalues, d): 957def handle_license_vars(srctree, lines_before, handled, extravalues, d):
921 lichandled = [x for x in handled if x[0] == 'license'] 958 lichandled = [x for x in handled if x[0] == 'license']
922 if lichandled: 959 if lichandled:
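fixup_license() and tidy_licenses() normalize LICENSE expressions before they are merged with the values detected from the source tree. Expected behaviour, assuming oe.license's '&'/'|' semantics:

    # fixup_license('MIT | Apache-2.0')  ->  '(MIT | Apache-2.0)'
    # fixup_license('MIT')               ->  'MIT'
    #
    # tidy_licenses('GPL-2.0-only & MIT & MIT')
    #   ->  ['GPL-2.0-only', 'MIT']   (flattened, deduplicated,
    #                                  case-insensitively sorted)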
@@ -930,10 +967,13 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
930 lines = [] 967 lines = []
931 if licvalues: 968 if licvalues:
932 for licvalue in licvalues: 969 for licvalue in licvalues:
933 if not licvalue[0] in licenses: 970 license = licvalue[0]
934 licenses.append(licvalue[0]) 971 lics = tidy_licenses(fixup_license(license))
972 lics = [lic for lic in lics if lic not in licenses]
973 if len(lics):
974 licenses.extend(lics)
935 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) 975 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
936 if licvalue[0] == 'Unknown': 976 if license == 'Unknown':
937 lic_unknown.append(licvalue[1]) 977 lic_unknown.append(licvalue[1])
938 if lic_unknown: 978 if lic_unknown:
939 lines.append('#') 979 lines.append('#')
@@ -942,9 +982,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
942 for licfile in lic_unknown: 982 for licfile in lic_unknown:
943 lines.append('# %s' % licfile) 983 lines.append('# %s' % licfile)
944 984
945 extra_license = split_value(extravalues.pop('LICENSE', [])) 985 extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
946 if '&' in extra_license:
947 extra_license.remove('&')
948 if extra_license: 986 if extra_license:
949 if licenses == ['Unknown']: 987 if licenses == ['Unknown']:
950 licenses = extra_license 988 licenses = extra_license
@@ -985,7 +1023,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
985 lines.append('# instead of &. If there is any doubt, check the accompanying documentation') 1023 lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
986 lines.append('# to determine which situation is applicable.') 1024 lines.append('# to determine which situation is applicable.')
987 1025
988 lines.append('LICENSE = "%s"' % ' & '.join(licenses)) 1026 lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
989 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) 1027 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
990 lines.append('') 1028 lines.append('')
991 1029
@@ -1002,118 +1040,170 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
1002 handled.append(('license', licvalues)) 1040 handled.append(('license', licvalues))
1003 return licvalues 1041 return licvalues
1004 1042
1005def get_license_md5sums(d, static_only=False): 1043def get_license_md5sums(d, static_only=False, linenumbers=False):
1006 import bb.utils 1044 import bb.utils
1045 import csv
1007 md5sums = {} 1046 md5sums = {}
1008 if not static_only: 1047 if not static_only and not linenumbers:
1009 # Gather md5sums of license files in common license dir 1048 # Gather md5sums of license files in common license dir
1010 commonlicdir = d.getVar('COMMON_LICENSE_DIR') 1049 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1011 for fn in os.listdir(commonlicdir): 1050 for fn in os.listdir(commonlicdir):
1012 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) 1051 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1013 md5sums[md5value] = fn 1052 md5sums[md5value] = fn
1053
1014 # The following were extracted from common values in various recipes 1054 # The following were extracted from common values in various recipes
1015 # (double checking the license against the license file itself, not just 1055 # (double checking the license against the license file itself, not just
1016 # the LICENSE value in the recipe) 1056 # the LICENSE value in the recipe)
1017 md5sums['94d55d512a9ba36caa9b7df079bae19f'] = 'GPLv2' 1057
1018 md5sums['b234ee4d69f5fce4486a80fdaf4a4263'] = 'GPLv2' 1058 # Read license md5sums from csv file
1019 md5sums['59530bdf33659b29e73d4adb9f9f6552'] = 'GPLv2' 1059 scripts_path = os.path.dirname(os.path.realpath(__file__))
1020 md5sums['0636e73ff0215e8d672dc4c32c317bb3'] = 'GPLv2' 1060 for path in (d.getVar('BBPATH').split(':')
1021 md5sums['eb723b61539feef013de476e68b5c50a'] = 'GPLv2' 1061 + [os.path.join(scripts_path, '..', '..')]):
1022 md5sums['751419260aa954499f7abaabaa882bbe'] = 'GPLv2' 1062 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
1023 md5sums['393a5ca445f6965873eca0259a17f833'] = 'GPLv2' 1063 if os.path.isfile(csv_path):
1024 md5sums['12f884d2ae1ff87c09e5b7ccc2c4ca7e'] = 'GPLv2' 1064 with open(csv_path, newline='') as csv_file:
1025 md5sums['8ca43cbc842c2336e835926c2166c28b'] = 'GPLv2' 1065 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
1026 md5sums['ebb5c50ab7cab4baeffba14977030c07'] = 'GPLv2' 1066 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
1027 md5sums['c93c0550bd3173f4504b2cbd8991e50b'] = 'GPLv2' 1067 for row in reader:
1028 md5sums['9ac2e7cff1ddaf48b6eab6028f23ef88'] = 'GPLv2' 1068 if linenumbers:
1029 md5sums['4325afd396febcb659c36b49533135d4'] = 'GPLv2' 1069 md5sums[row['md5sum']] = (
1030 md5sums['18810669f13b87348459e611d31ab760'] = 'GPLv2' 1070 row['license'], row['beginline'], row['endline'], row['md5'])
1031 md5sums['d7810fab7487fb0aad327b76f1be7cd7'] = 'GPLv2' # the Linux kernel's COPYING file 1071 else:
1032 md5sums['bbb461211a33b134d42ed5ee802b37ff'] = 'LGPLv2.1' 1072 md5sums[row['md5sum']] = row['license']
1033 md5sums['7fbc338309ac38fefcd64b04bb903e34'] = 'LGPLv2.1' 1073
1034 md5sums['4fbd65380cdd255951079008b364516c'] = 'LGPLv2.1'
1035 md5sums['2d5025d4aa3495befef8f17206a5b0a1'] = 'LGPLv2.1'
1036 md5sums['fbc093901857fcd118f065f900982c24'] = 'LGPLv2.1'
1037 md5sums['a6f89e2100d9b6cdffcea4f398e37343'] = 'LGPLv2.1'
1038 md5sums['d8045f3b8f929c1cb29a1e3fd737b499'] = 'LGPLv2.1'
1039 md5sums['fad9b3332be894bab9bc501572864b29'] = 'LGPLv2.1'
1040 md5sums['3bf50002aefd002f49e7bb854063f7e7'] = 'LGPLv2'
1041 md5sums['9f604d8a4f8e74f4f5140845a21b6674'] = 'LGPLv2'
1042 md5sums['5f30f0716dfdd0d91eb439ebec522ec2'] = 'LGPLv2'
1043 md5sums['55ca817ccb7d5b5b66355690e9abc605'] = 'LGPLv2'
1044 md5sums['252890d9eee26aab7b432e8b8a616475'] = 'LGPLv2'
1045 md5sums['3214f080875748938ba060314b4f727d'] = 'LGPLv2'
1046 md5sums['db979804f025cf55aabec7129cb671ed'] = 'LGPLv2'
1047 md5sums['d32239bcb673463ab874e80d47fae504'] = 'GPLv3'
1048 md5sums['f27defe1e96c2e1ecd4e0c9be8967949'] = 'GPLv3'
1049 md5sums['6a6a8e020838b23406c81b19c1d46df6'] = 'LGPLv3'
1050 md5sums['3b83ef96387f14655fc854ddc3c6bd57'] = 'Apache-2.0'
1051 md5sums['385c55653886acac3821999a3ccd17b3'] = 'Artistic-1.0 | GPL-2.0' # some perl modules
1052 md5sums['54c7042be62e169199200bc6477f04d1'] = 'BSD-3-Clause'
1053 md5sums['bfe1f75d606912a4111c90743d6c7325'] = 'MPL-1.1'
1054 return md5sums 1074 return md5sums
1055 1075
1056def crunch_license(licfile): 1076def crunch_known_licenses(d):
1057 ''' 1077 '''
1058 Remove non-material text from a license file and then check 1078 Calculate the MD5 checksums for the crunched versions of all common
1059 its md5sum against a known list. This works well for licenses 1079 licenses. Also add additional known checksums.
1060 which contain a copyright statement, but is also a useful way
1061 to handle people's insistence upon reformatting the license text
1062 slightly (with no material difference to the text of the
1063 license).
1064 ''' 1080 '''
1081
1082 crunched_md5sums = {}
1065 1083
1066 import oe.utils 1084 # common licenses
1085 crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
1086 crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
1087 crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
1067 1088
1068 # Note: these are carefully constructed!
1069 license_title_re = re.compile(r'^\(?(#+ *)?(The )?.{1,10} [Ll]icen[sc]e( \(.{1,10}\))?\)?:?$')
1070 license_statement_re = re.compile(r'^(This (project|software) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1071 copyright_re = re.compile('^(#+)? *Copyright .*$')
1072
1073 crunched_md5sums = {}
1074 # The following two were gleaned from the "forever" npm package 1089 # The following two were gleaned from the "forever" npm package
1075 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' 1090 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1076 crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
1077 # https://github.com/vasi/pixz/blob/master/LICENSE
1078 crunched_md5sums['2f03392b40bbe663597b5bd3cc5ebdb9'] = 'BSD-2-Clause'
1079 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt 1091 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1080 crunched_md5sums['e72e5dfef0b1a4ca8a3d26a60587db66'] = 'BSD-2-Clause' 1092 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1081 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE 1093 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1082 crunched_md5sums['8be76ac6d191671f347ee4916baa637e'] = 'GPLv2' 1094 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
1083 # https://github.com/datto/dattobd/blob/master/COPYING
1084 # http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/GPLv2.TXT
1085 crunched_md5sums['1d65c5ad4bf6489f85f4812bf08ae73d'] = 'GPLv2'
1086 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt 1095 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1087 # http://git.neil.brown.name/?p=mdadm.git;a=blob;f=COPYING;h=d159169d1050894d3ea3b98e1c965c4058208fe1;hb=HEAD 1096 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
1088 crunched_md5sums['fb530f66a7a89ce920f0e912b5b66d4b'] = 'GPLv2'
1089 # https://github.com/gkos/nrf24/blob/master/COPYING
1090 crunched_md5sums['7b6aaa4daeafdfa6ed5443fd2684581b'] = 'GPLv2'
1091 # https://github.com/josch09/resetusb/blob/master/COPYING
1092 crunched_md5sums['8b8ac1d631a4d220342e83bcf1a1fbc3'] = 'GPLv3'
1093 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1094 crunched_md5sums['2ea316ed973ae176e502e2297b574bb3'] = 'LGPLv2.1' 1098 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
1095 # unixODBC-2.3.4 COPYING 1099 # unixODBC-2.3.4 COPYING
1096 crunched_md5sums['1daebd9491d1e8426900b4fa5a422814'] = 'LGPLv2.1' 1100 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 1101 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1098 crunched_md5sums['2ebfb3bb49b9a48a075cc1425e7f4129'] = 'LGPLv3' 1102 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
1099 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 1103 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1100 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' 1104 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1101 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/edl-v10 1105
1102 crunched_md5sums['0a9c78c0a398d1bbce4a166757d60387'] = 'EDL-1.0' 1106 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
1107 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
1108 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
1109 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
1110 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
1111 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
1112 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
1113 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
1114 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
1115 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
1116 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
1117 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
1118 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
1119 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
1120 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
1121 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
1122 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
1123 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
1124 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
1125 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
1126 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
1127 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
1128 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
1129 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
1130 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
1131 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
1132 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
1133 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
1134 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
1135 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
1136 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
1137 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
1138 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
1139 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
1140 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
1141 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
1142
1143 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1144 for fn in sorted(os.listdir(commonlicdir)):
1145 md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
1146 if md5value not in crunched_md5sums:
1147 crunched_md5sums[md5value] = fn
1148 elif fn != crunched_md5sums[md5value]:
1149 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
1150 else:
1151 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
1152
1153 return crunched_md5sums
1154
1155def crunch_license(licfile):
1156 '''
1157 Remove non-material text from a license file and then calculate its
1158 md5sum. This works well for licenses that contain a copyright statement,
1159 but is also a useful way to handle people's insistence upon reformatting
1160 the license text slightly (with no material difference to the text of the
1161 license).
1162 '''
1163
1164 import oe.utils
1165
1166 # Note: these are carefully constructed!
1167 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
1168 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1169 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
1170 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
1171 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
1172 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
1173 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
1174 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
1175
1103 lictext = [] 1176 lictext = []
1104 with open(licfile, 'r', errors='surrogateescape') as f: 1177 with open(licfile, 'r', errors='surrogateescape') as f:
1105 for line in f: 1178 for line in f:
1106 # Drop opening statements 1179 # Drop opening statements
1107 if copyright_re.match(line): 1180 if copyright_re.match(line):
1108 continue 1181 continue
1182 elif disclaimer_re.match(line):
1183 continue
1184 elif email_re.match(line):
1185 continue
1186 elif header_re.match(line):
1187 continue
1188 elif tag_re.match(line):
1189 continue
1190 elif url_re.match(line):
1191 continue
1109 elif license_title_re.match(line): 1192 elif license_title_re.match(line):
1110 continue 1193 continue
1111 elif license_statement_re.match(line): 1194 elif license_statement_re.match(line):
1112 continue 1195 continue
1113 # Squash spaces, and replace smart quotes, double quotes 1196 # Strip comment symbols
1114 # and backticks with single quotes 1197 line = line.replace('*', '') \
1198 .replace('#', '')
1199 # Unify spelling
1200 line = line.replace('sub-license', 'sublicense')
1201 # Squash spaces
1115 line = oe.utils.squashspaces(line.strip()) 1202 line = oe.utils.squashspaces(line.strip())
1203 # Replace smart quotes, double quotes and backticks with single quotes
1116 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') 1204 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1205 # Unify brackets
1206 line = line.replace("{", "[").replace("}", "]")
1117 if line: 1207 if line:
1118 lictext.append(line) 1208 lictext.append(line)
1119 1209
@@ -1124,31 +1214,40 @@ def crunch_license(licfile):
1124 except UnicodeEncodeError: 1214 except UnicodeEncodeError:
1125 md5val = None 1215 md5val = None
1126 lictext = '' 1216 lictext = ''
1127 license = crunched_md5sums.get(md5val, None) 1217 return md5val, lictext
1128 return license, md5val, lictext
1129 1218
1130def guess_license(srctree, d): 1219def guess_license(srctree, d):
1131 import bb 1220 import bb
1132 md5sums = get_license_md5sums(d) 1221 md5sums = get_license_md5sums(d)
1133 1222
1223 crunched_md5sums = crunch_known_licenses(d)
1224
1134 licenses = [] 1225 licenses = []
1135 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] 1226 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1227 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
1136 licfiles = [] 1228 licfiles = []
1137 for root, dirs, files in os.walk(srctree): 1229 for root, dirs, files in os.walk(srctree):
1138 for fn in files: 1230 for fn in files:
1231 if fn.endswith(skip_extensions):
1232 continue
1139 for spec in licspecs: 1233 for spec in licspecs:
1140 if fnmatch.fnmatch(fn, spec): 1234 if fnmatch.fnmatch(fn, spec):
1141 fullpath = os.path.join(root, fn) 1235 fullpath = os.path.join(root, fn)
1142 if not fullpath in licfiles: 1236 if not fullpath in licfiles:
1143 licfiles.append(fullpath) 1237 licfiles.append(fullpath)
1144 for licfile in licfiles: 1238 for licfile in sorted(licfiles):
1145 md5value = bb.utils.md5_file(licfile) 1239 md5value = bb.utils.md5_file(licfile)
1146 license = md5sums.get(md5value, None) 1240 license = md5sums.get(md5value, None)
1147 if not license: 1241 if not license:
1148 license, crunched_md5, lictext = crunch_license(licfile) 1242 crunched_md5, lictext = crunch_license(licfile)
1149 if not license: 1243 license = crunched_md5sums.get(crunched_md5, None)
1244 if lictext and not license:
1150 license = 'Unknown' 1245 license = 'Unknown'
1151 licenses.append((license, os.path.relpath(licfile, srctree), md5value)) 1246 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
1247 "and replace `Unknown` with the license:\n" \
1248 "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
1249 if license:
1250 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1152 1251
1153 # FIXME should we grab at least one source file with a license header and add that too? 1252 # FIXME should we grab at least one source file with a license header and add that too?
1154 1253
@@ -1162,6 +1261,7 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
1162 """ 1261 """
1163 pkglicenses = {pn: []} 1262 pkglicenses = {pn: []}
1164 for license, licpath, _ in licvalues: 1263 for license, licpath, _ in licvalues:
1264 license = fixup_license(license)
1165 for pkgname, pkgpath in packages.items(): 1265 for pkgname, pkgpath in packages.items():
1166 if licpath.startswith(pkgpath + '/'): 1266 if licpath.startswith(pkgpath + '/'):
1167 if pkgname in pkglicenses: 1267 if pkgname in pkglicenses:
@@ -1174,11 +1274,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
1174 pkglicenses[pn].append(license) 1274 pkglicenses[pn].append(license)
1175 outlicenses = {} 1275 outlicenses = {}
1176 for pkgname in packages: 1276 for pkgname in packages:
1177 license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' 1277 # Assume AND operator between license files
1178 if license == 'Unknown' and pkgname in fallback_licenses: 1278 license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
1279 if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
1179 license = fallback_licenses[pkgname] 1280 license = fallback_licenses[pkgname]
1180 outlines.append('LICENSE_%s = "%s"' % (pkgname, license)) 1281 licenses = tidy_licenses(license)
1181 outlicenses[pkgname] = license.split() 1282 license = ' & '.join(licenses)
1283 outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
1284 outlicenses[pkgname] = licenses
1182 return outlicenses 1285 return outlicenses
1183 1286
1184def read_pkgconfig_provides(d): 1287def read_pkgconfig_provides(d):
@@ -1311,6 +1414,7 @@ def register_commands(subparsers):
1311 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') 1414 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
1312 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') 1415 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
1313 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') 1416 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
1417 parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
1314 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1418 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1315 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1419 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1316 parser_create.set_defaults(func=create_recipe) 1420 parser_create.set_defaults(func=create_recipe)
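
The crunching logic above normalizes a license file before hashing, so that trivially reformatted copies of a common license still map onto a known checksum. A minimal standalone sketch of that per-line normalization, assuming oe.utils.squashspaces simply collapses runs of whitespace (approximated with a regex here) and that the crunched lines are joined with spaces before the md5 is taken (the join/hash step is not visible in the hunks above):

    import hashlib
    import re

    def crunch_text(licfile):
        # Mirrors the per-line normalization in crunch_license(); the final
        # join and md5 step is an assumption, for illustration only.
        lictext = []
        with open(licfile, errors='surrogateescape') as f:
            for line in f:
                line = line.replace('*', '').replace('#', '')     # strip comment symbols
                line = line.replace('sub-license', 'sublicense')  # unify spelling
                line = re.sub(r'\s+', ' ', line).strip()          # squash spaces
                for q in (u'\u2018', u'\u2019', u'\u201c', u'\u201d', '"', '`'):
                    line = line.replace(q, "'")                   # unify quote styles
                line = line.replace('{', '[').replace('}', ']')   # unify brackets
                if line:
                    lictext.append(line)
        return hashlib.md5(' '.join(lictext).encode('utf-8')).hexdigest()

Note this sketch skips the regex filters (copyright, disclaimer, email, header, tag and URL lines) that the full implementation drops before the substitutions.
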
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 35a97c9345..ec9d510e23 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -5,9 +5,9 @@
5# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
6# 6#
7 7
8import os
8import re 9import re
9import logging 10import logging
10import glob
11from recipetool.create import RecipeHandler, validate_pv 11from recipetool.create import RecipeHandler, validate_pv
12 12
13logger = logging.getLogger('recipetool') 13logger = logging.getLogger('recipetool')
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler):
137 deps = [] 137 deps = []
138 unmappedpkgs = [] 138 unmappedpkgs = []
139 139
140 proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE) 140 proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
141 pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE) 141 pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
142 pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE) 142 pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
143 findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE) 143 findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
144 findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*') 144 findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
145 checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE) 145 checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
146 include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE) 146 include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
147 subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE) 147 subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
148 dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?') 148 dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
149 149
150 def find_cmake_package(pkg): 150 def find_cmake_package(pkg):
151 RecipeHandler.load_devel_filemap(tinfoil.config_data) 151 RecipeHandler.load_devel_filemap(tinfoil.config_data)
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler):
423 'makeinfo': 'texinfo', 423 'makeinfo': 'texinfo',
424 } 424 }
425 425
426 pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') 426 pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
427 pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*') 427 pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
428 lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*') 428 lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
429 libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*') 429 libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
430 progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') 430 progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
431 dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?') 431 dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
432 ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*') 432 ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
433 am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*') 433 am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
434 define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)') 434 define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
435 version_re = re.compile('([0-9.]+)') 435 version_re = re.compile(r'([0-9.]+)')
436 436
437 defines = {} 437 defines = {}
438 def subst_defines(value): 438 def subst_defines(value):
@@ -545,7 +545,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
545 deps.append('zlib') 545 deps.append('zlib')
546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'): 546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
547 deps.append('openssl') 547 deps.append('openssl')
548 elif keyword == 'AX_LIB_CURL': 548 elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
549 deps.append('curl') 549 deps.append('curl')
550 elif keyword == 'AX_LIB_BEECRYPT': 550 elif keyword == 'AX_LIB_BEECRYPT':
551 deps.append('beecrypt') 551 deps.append('beecrypt')
@@ -624,6 +624,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
624 'AX_CHECK_OPENSSL', 624 'AX_CHECK_OPENSSL',
625 'AX_LIB_CRYPTO', 625 'AX_LIB_CRYPTO',
626 'AX_LIB_CURL', 626 'AX_LIB_CURL',
627 'LIBCURL_CHECK_CONFIG',
627 'AX_LIB_BEECRYPT', 628 'AX_LIB_BEECRYPT',
628 'AX_LIB_EXPAT', 629 'AX_LIB_EXPAT',
629 'AX_LIB_GCRYPT', 630 'AX_LIB_GCRYPT',
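
The create_buildsys.py changes are largely mechanical: every regex literal gains an r'' prefix so that escapes such as \s and \( reach the re module verbatim instead of going through string-literal escape processing, which Python has warned about for unrecognized escapes since 3.6 (DeprecationWarning, upgraded to SyntaxWarning in 3.12). A quick illustration of the now-warning-free form:

    import re

    # With the raw-string prefix, '\s' is passed to the regex engine
    # unchanged; without it, Python first tries to interpret it as a
    # string escape and emits a warning for the unknown sequence.
    proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)

    m = proj_re.match('PROJECT(foo VERSION 1.0)')
    assert m and m.group(1) == 'foo VERSION 1.0'
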
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index adfa377956..a807dafae5 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -8,9 +8,9 @@
8import ast 8import ast
9import codecs 9import codecs
10import collections 10import collections
11import distutils.command.build_py 11import setuptools.command.build_py
12import email 12import email
13import imp 13import importlib
14import glob 14import glob
15import itertools 15import itertools
16import logging 16import logging
@@ -18,7 +18,11 @@ import os
18import re 18import re
19import sys 19import sys
20import subprocess 20import subprocess
21import json
22import urllib.request
21from recipetool.create import RecipeHandler 23from recipetool.create import RecipeHandler
24from urllib.parse import urldefrag
25from recipetool.create import determine_from_url
22 26
23logger = logging.getLogger('recipetool') 27logger = logging.getLogger('recipetool')
24 28
@@ -37,7 +41,334 @@ class PythonRecipeHandler(RecipeHandler):
37 assume_provided = ['builtins', 'os.path'] 41 assume_provided = ['builtins', 'os.path']
38 # Assumes that the host python3 builtin_module_names is sane for target too 42 # Assumes that the host python3 builtin_module_names is sane for target too
39 assume_provided = assume_provided + list(sys.builtin_module_names) 43 assume_provided = assume_provided + list(sys.builtin_module_names)
44 excluded_fields = []
40 45
46
47 classifier_license_map = {
48 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
49 'License :: OSI Approved :: Apache Software License': 'Apache',
50 'License :: OSI Approved :: Apple Public Source License': 'APSL',
51 'License :: OSI Approved :: Artistic License': 'Artistic',
52 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
53 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
54 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
55 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
56 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
57 'License :: OSI Approved :: Common Public License': 'CPL',
58 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
59 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
60 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
61 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
62 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
63 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
64 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
65 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
66 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
67 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
68 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
69 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
70 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
71 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
72 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
73 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
74 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
75 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
76 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
77 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
78 'License :: OSI Approved :: IBM Public License': 'IPL',
79 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
80 'License :: OSI Approved :: Intel Open Source License': 'Intel',
81 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
82 'License :: OSI Approved :: MIT License': 'MIT',
83 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
84 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
85 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
86 'License :: OSI Approved :: Motosoto License': 'Motosoto',
87 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
88 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
89 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
90 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
91 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
92 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
93 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
94 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
95 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
96 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
97 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
98 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
99 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
100 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
101 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
102 'License :: OSI Approved :: Sun Public License': 'SPL',
103 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
104 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
105 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
106 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
107 'License :: OSI Approved :: W3C License': 'W3C',
108 'License :: OSI Approved :: X.Net License': 'Xnet',
109 'License :: OSI Approved :: Zope Public License': 'ZPL',
110 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
111 'License :: Other/Proprietary License': 'Proprietary',
112 'License :: Public Domain': 'PD',
113 }
114
115 def __init__(self):
116 pass
117
118 def process_url(self, args, classes, handled, extravalues):
119 """
120 Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
 121 which corresponds to the archive location, and add the pypi class
122 """
123
124 if 'url' in handled:
125 return None
126
127 fetch_uri = None
128 source = args.source
129 required_version = args.version if args.version else None
130 match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
131 if match:
132 package = match.group(1)
133 version = match.group(2) if match.group(2) else required_version
134
 135 json_url = "https://pypi.org/pypi/%s/json" % package
136 response = urllib.request.urlopen(json_url)
137 if response.status == 200:
138 data = json.loads(response.read())
139 if not version:
140 # grab latest version
141 version = data["info"]["version"]
142 pypi_package = data["info"]["name"]
143 for release in reversed(data["releases"][version]):
144 if release["packagetype"] == "sdist":
145 fetch_uri = release["url"]
146 break
147 else:
148 logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
149 return None
150 else:
151 match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
152 if match:
153 fetch_uri = source
154 pypi_package = match.group(1)
155 _, version = determine_from_url(fetch_uri)
156
157 if match and not args.no_pypi:
158 if required_version and version != required_version:
159 raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
 160 # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
 161 # but at this point we cannot know, because the user can specify the output name of the recipe on the command line
162 extravalues["PYPI_PACKAGE"] = pypi_package
 163 # If the tarball extension is not 'tar.gz' (default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe
164 pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
165 if pypi_package_ext:
166 pypi_package_ext = pypi_package_ext.group(1)
167 if pypi_package_ext != "tar.gz":
168 extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
169
170 # Pypi class will handle S and SRC_URI variables, so remove them
171 # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
172 # extravalues['SRC_URI(?:\[.*?\])?'] = None
173 extravalues['S'] = None
174 extravalues['SRC_URI'] = None
175
176 classes.append('pypi')
177
178 handled.append('url')
179 return fetch_uri
180
181 def handle_classifier_license(self, classifiers, existing_licenses=""):
182
183 licenses = []
184 for classifier in classifiers:
185 if classifier in self.classifier_license_map:
186 license = self.classifier_license_map[classifier]
187 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
188 license = 'Apache-2.0'
189 elif license == 'GPL':
190 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
191 license = 'GPL-2.0'
192 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
193 license = 'GPL-3.0'
194 elif license == 'LGPL':
195 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
196 license = 'LGPL-2.1'
197 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
198 license = 'LGPL-2.0'
199 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
200 license = 'LGPL-3.0'
201 licenses.append(license)
202
203 if licenses:
204 return ' & '.join(licenses)
205
206 return None
207
208 def map_info_to_bbvar(self, info, extravalues):
209
210 # Map PKG-INFO & setup.py fields to bitbake variables
211 for field, values in info.items():
212 if field in self.excluded_fields:
213 continue
214
215 if field not in self.bbvar_map:
216 continue
217
218 if isinstance(values, str):
219 value = values
220 else:
221 value = ' '.join(str(v) for v in values if v)
222
223 bbvar = self.bbvar_map[field]
224 if bbvar == "PN":
225 # by convention python recipes start with "python3-"
226 if not value.startswith('python'):
227 value = 'python3-' + value
228
229 if bbvar not in extravalues and value:
230 extravalues[bbvar] = value
231
232 def apply_info_replacements(self, info):
233 if not self.replacements:
234 return
235
236 for variable, search, replace in self.replacements:
237 if variable not in info:
238 continue
239
240 def replace_value(search, replace, value):
241 if replace is None:
242 if re.search(search, value):
243 return None
244 else:
245 new_value = re.sub(search, replace, value)
246 if value != new_value:
247 return new_value
248 return value
249
250 value = info[variable]
251 if isinstance(value, str):
252 new_value = replace_value(search, replace, value)
253 if new_value is None:
254 del info[variable]
255 elif new_value != value:
256 info[variable] = new_value
257 elif hasattr(value, 'items'):
258 for dkey, dvalue in list(value.items()):
259 new_list = []
260 for pos, a_value in enumerate(dvalue):
261 new_value = replace_value(search, replace, a_value)
262 if new_value is not None and new_value != value:
263 new_list.append(new_value)
264
265 if value != new_list:
266 value[dkey] = new_list
267 else:
268 new_list = []
269 for pos, a_value in enumerate(value):
270 new_value = replace_value(search, replace, a_value)
271 if new_value is not None and new_value != value:
272 new_list.append(new_value)
273
274 if value != new_list:
275 info[variable] = new_list
276
277
278 def scan_python_dependencies(self, paths):
279 deps = set()
280 try:
281 dep_output = self.run_command(['pythondeps', '-d'] + paths)
282 except (OSError, subprocess.CalledProcessError):
283 pass
284 else:
285 for line in dep_output.splitlines():
286 line = line.rstrip()
287 dep, filename = line.split('\t', 1)
288 if filename.endswith('/setup.py'):
289 continue
290 deps.add(dep)
291
292 try:
293 provides_output = self.run_command(['pythondeps', '-p'] + paths)
294 except (OSError, subprocess.CalledProcessError):
295 pass
296 else:
297 provides_lines = (l.rstrip() for l in provides_output.splitlines())
298 provides = set(l for l in provides_lines if l and l != 'setup')
299 deps -= provides
300
301 return deps
302
303 def parse_pkgdata_for_python_packages(self):
304 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
305
306 ldata = tinfoil.config_data.createCopy()
307 bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
308 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
309
310 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
311 python_dirs = [python_sitedir + os.sep,
312 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
313 os.path.dirname(python_sitedir) + os.sep]
314 packages = {}
315 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
316 files_info = None
317 with open(pkgdatafile, 'r') as f:
318 for line in f.readlines():
319 field, value = line.split(': ', 1)
320 if field.startswith('FILES_INFO'):
321 files_info = ast.literal_eval(value)
322 break
323 else:
324 continue
325
326 for fn in files_info:
327 for suffix in importlib.machinery.all_suffixes():
328 if fn.endswith(suffix):
329 break
330 else:
331 continue
332
333 if fn.startswith(dynload_dir + os.sep):
334 if '/.debug/' in fn:
335 continue
336 base = os.path.basename(fn)
337 provided = base.split('.', 1)[0]
338 packages[provided] = os.path.basename(pkgdatafile)
339 continue
340
341 for python_dir in python_dirs:
342 if fn.startswith(python_dir):
343 relpath = fn[len(python_dir):]
344 relstart, _, relremaining = relpath.partition(os.sep)
345 if relstart.endswith('.egg'):
346 relpath = relremaining
347 base, _ = os.path.splitext(relpath)
348
349 if '/.debug/' in base:
350 continue
351 if os.path.basename(base) == '__init__':
352 base = os.path.dirname(base)
353 base = base.replace(os.sep + os.sep, os.sep)
354 provided = base.replace(os.sep, '.')
355 packages[provided] = os.path.basename(pkgdatafile)
356 return packages
357
358 @classmethod
359 def run_command(cls, cmd, **popenargs):
360 if 'stderr' not in popenargs:
361 popenargs['stderr'] = subprocess.STDOUT
362 try:
363 return subprocess.check_output(cmd, **popenargs).decode('utf-8')
364 except OSError as exc:
365 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
366 raise
367 except subprocess.CalledProcessError as exc:
368 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
369 raise
370
371class PythonSetupPyRecipeHandler(PythonRecipeHandler):
41 bbvar_map = { 372 bbvar_map = {
42 'Name': 'PN', 373 'Name': 'PN',
43 'Version': 'PV', 374 'Version': 'PV',
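
process_url() above resolves a https://pypi.org/project/<package>/<version> URL through PyPI's documented JSON API (https://pypi.org/pypi/<package>/json) to locate the sdist archive. A trimmed-down sketch of just that lookup, with the recipetool plumbing and error handling omitted:

    import json
    import urllib.request

    def find_sdist_url(package, version=None):
        # Query PyPI's JSON API; fall back to the latest release if no
        # version was requested, as the handler above does.
        with urllib.request.urlopen("https://pypi.org/pypi/%s/json" % package) as response:
            data = json.loads(response.read())
        version = version or data["info"]["version"]
        # Scan the files for this release in reverse and take the last
        # sdist, mirroring the reversed() loop in process_url().
        for release in reversed(data["releases"][version]):
            if release["packagetype"] == "sdist":
                return release["url"]
        return None
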
@@ -45,9 +376,9 @@ class PythonRecipeHandler(RecipeHandler):
45 'Summary': 'SUMMARY', 376 'Summary': 'SUMMARY',
46 'Description': 'DESCRIPTION', 377 'Description': 'DESCRIPTION',
47 'License': 'LICENSE', 378 'License': 'LICENSE',
48 'Requires': 'RDEPENDS_${PN}', 379 'Requires': 'RDEPENDS:${PN}',
49 'Provides': 'RPROVIDES_${PN}', 380 'Provides': 'RPROVIDES:${PN}',
50 'Obsoletes': 'RREPLACES_${PN}', 381 'Obsoletes': 'RREPLACES:${PN}',
51 } 382 }
52 # PN/PV are already set by recipetool core & desc can be extremely long 383 # PN/PV are already set by recipetool core & desc can be extremely long
53 excluded_fields = [ 384 excluded_fields = [
@@ -75,6 +406,7 @@ class PythonRecipeHandler(RecipeHandler):
75 'Supported-Platform', 406 'Supported-Platform',
76 ] 407 ]
77 setuparg_multi_line_values = ['Description'] 408 setuparg_multi_line_values = ['Description']
409
78 replacements = [ 410 replacements = [
79 ('License', r' +$', ''), 411 ('License', r' +$', ''),
80 ('License', r'^ +', ''), 412 ('License', r'^ +', ''),
@@ -95,71 +427,161 @@ class PythonRecipeHandler(RecipeHandler):
95 ('Install-requires', r'\[[^\]]+\]$', ''), 427 ('Install-requires', r'\[[^\]]+\]$', ''),
96 ] 428 ]
97 429
98 classifier_license_map = {
99 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
100 'License :: OSI Approved :: Apache Software License': 'Apache',
101 'License :: OSI Approved :: Apple Public Source License': 'APSL',
102 'License :: OSI Approved :: Artistic License': 'Artistic',
103 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
104 'License :: OSI Approved :: BSD License': 'BSD',
105 'License :: OSI Approved :: Common Public License': 'CPL',
106 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
107 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
108 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
109 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0+',
110 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0',
111 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
112 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
113 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0',
114 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0+',
115 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0',
116 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0+',
117 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0',
118 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0+',
119 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0',
120 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0+',
121 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
122 'License :: OSI Approved :: IBM Public License': 'IPL',
123 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
124 'License :: OSI Approved :: Intel Open Source License': 'Intel',
125 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
126 'License :: OSI Approved :: MIT License': 'MIT',
127 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
128 'License :: OSI Approved :: Motosoto License': 'Motosoto',
129 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
130 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
131 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
132 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
133 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
134 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
135 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
136 'License :: OSI Approved :: Python Software Foundation License': 'PSF',
137 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
138 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
139 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
140 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': '-- Sun Industry Standards Source License (SISSL)',
141 'License :: OSI Approved :: Sun Public License': 'SPL',
142 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
143 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
144 'License :: OSI Approved :: W3C License': 'W3C',
145 'License :: OSI Approved :: X.Net License': 'Xnet',
146 'License :: OSI Approved :: Zope Public License': 'ZPL',
147 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
148 }
149
150 def __init__(self): 430 def __init__(self):
151 pass 431 pass
152 432
433 def parse_setup_py(self, setupscript='./setup.py'):
434 with codecs.open(setupscript) as f:
435 info, imported_modules, non_literals, extensions = gather_setup_info(f)
436
437 def _map(key):
438 key = key.replace('_', '-')
439 key = key[0].upper() + key[1:]
440 if key in self.setup_parse_map:
441 key = self.setup_parse_map[key]
442 return key
443
444 # Naive mapping of setup() arguments to PKG-INFO field names
445 for d in [info, non_literals]:
446 for key, value in list(d.items()):
447 if key is None:
448 continue
449 new_key = _map(key)
450 if new_key != key:
451 del d[key]
452 d[new_key] = value
453
454 return info, 'setuptools' in imported_modules, non_literals, extensions
455
456 def get_setup_args_info(self, setupscript='./setup.py'):
457 cmd = ['python3', setupscript]
458 info = {}
459 keys = set(self.bbvar_map.keys())
460 keys |= set(self.setuparg_list_fields)
461 keys |= set(self.setuparg_multi_line_values)
462 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
463 for index, keys in grouped_keys:
464 if index == (True, False):
465 # Splitlines output for each arg as a list value
466 for key in keys:
467 arg = self.setuparg_map.get(key, key.lower())
468 try:
469 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
470 except (OSError, subprocess.CalledProcessError):
471 pass
472 else:
473 info[key] = [l.rstrip() for l in arg_info.splitlines()]
474 elif index == (False, True):
475 # Entire output for each arg
476 for key in keys:
477 arg = self.setuparg_map.get(key, key.lower())
478 try:
479 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
480 except (OSError, subprocess.CalledProcessError):
481 pass
482 else:
483 info[key] = arg_info
484 else:
485 info.update(self.get_setup_byline(list(keys), setupscript))
486 return info
487
488 def get_setup_byline(self, fields, setupscript='./setup.py'):
489 info = {}
490
491 cmd = ['python3', setupscript]
492 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
493 try:
494 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
495 except (OSError, subprocess.CalledProcessError):
496 pass
497 else:
498 if len(fields) != len(info_lines):
499 logger.error('Mismatch between setup.py output lines and number of fields')
500 sys.exit(1)
501
502 for lineno, line in enumerate(info_lines):
503 line = line.rstrip()
504 info[fields[lineno]] = line
505 return info
506
507 def get_pkginfo(self, pkginfo_fn):
508 msg = email.message_from_file(open(pkginfo_fn, 'r'))
509 msginfo = {}
510 for field in msg.keys():
511 values = msg.get_all(field)
512 if len(values) == 1:
513 msginfo[field] = values[0]
514 else:
515 msginfo[field] = values
516 return msginfo
517
518 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
519 if 'Package-dir' in setup_info:
520 package_dir = setup_info['Package-dir']
521 else:
522 package_dir = {}
523
524 dist = setuptools.Distribution()
525
526 class PackageDir(setuptools.command.build_py.build_py):
527 def __init__(self, package_dir):
528 self.package_dir = package_dir
529 self.dist = dist
530 super().__init__(self.dist)
531
532 pd = PackageDir(package_dir)
533 to_scan = []
534 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
535 if 'Py-modules' in setup_info:
536 for module in setup_info['Py-modules']:
537 try:
538 package, module = module.rsplit('.', 1)
539 except ValueError:
540 package, module = '.', module
541 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
542 to_scan.append(module_path)
543
544 if 'Packages' in setup_info:
545 for package in setup_info['Packages']:
546 to_scan.append(pd.get_package_dir(package))
547
548 if 'Scripts' in setup_info:
549 to_scan.extend(setup_info['Scripts'])
550 else:
551 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
552
553 if not to_scan:
554 to_scan = ['.']
555
556 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
557
558 provided_packages = self.parse_pkgdata_for_python_packages()
559 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
560 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
561 for dep in scanned_deps:
562 mapped = provided_packages.get(dep)
563 if mapped:
564 logger.debug('Mapped %s to %s' % (dep, mapped))
565 mapped_deps.add(mapped)
566 else:
567 logger.debug('Could not map %s' % dep)
568 unmapped_deps.add(dep)
569 return mapped_deps, unmapped_deps
570
153 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 571 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
572
154 if 'buildsystem' in handled: 573 if 'buildsystem' in handled:
155 return False 574 return False
156 575
576 logger.debug("Trying setup.py parser")
577
157 # Check for non-zero size setup.py files 578 # Check for non-zero size setup.py files
158 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) 579 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
159 for fn in setupfiles: 580 for fn in setupfiles:
160 if os.path.getsize(fn): 581 if os.path.getsize(fn):
161 break 582 break
162 else: 583 else:
584 logger.debug("No setup.py found")
163 return False 585 return False
164 586
165 # setup.py is always parsed to get at certain required information, such as 587 # setup.py is always parsed to get at certain required information, such as
@@ -193,6 +615,18 @@ class PythonRecipeHandler(RecipeHandler):
193 continue 615 continue
194 616
195 if line.startswith('['): 617 if line.startswith('['):
618 # PACKAGECONFIG must not contain expressions or whitespace
619 line = line.replace(" ", "")
620 line = line.replace(':', "")
621 line = line.replace('.', "-dot-")
622 line = line.replace('"', "")
623 line = line.replace('<', "-smaller-")
624 line = line.replace('>', "-bigger-")
625 line = line.replace('_', "-")
626 line = line.replace('(', "")
627 line = line.replace(')', "")
628 line = line.replace('!', "-not-")
629 line = line.replace('=', "-equals-")
196 current_feature = line[1:-1] 630 current_feature = line[1:-1]
197 elif current_feature: 631 elif current_feature:
198 extras_req[current_feature].append(line) 632 extras_req[current_feature].append(line)
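
The substitutions above exist because setuptools extras_require section names can embed environment markers, while PACKAGECONFIG feature names must be plain tokens with no whitespace, colons or comparison operators. A self-contained sketch of the same mapping, applied to a hypothetical requires.txt section header:

    def sanitize_extra(line):
        # Apply the same character substitutions as the handler above, in
        # the same order, then strip the surrounding brackets.
        for old, new in ((' ', ''), (':', ''), ('.', '-dot-'), ('"', ''),
                         ('<', '-smaller-'), ('>', '-bigger-'), ('_', '-'),
                         ('(', ''), (')', ''), ('!', '-not-'), ('=', '-equals-')):
            line = line.replace(old, new)
        return line[1:-1]

    assert sanitize_extra('[test:python_version < "3.8"]') == 'testpython-version-smaller-3-dot-8'
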
@@ -226,51 +660,16 @@ class PythonRecipeHandler(RecipeHandler):
226 660
227 if license_str: 661 if license_str:
228 for i, line in enumerate(lines_before): 662 for i, line in enumerate(lines_before):
229 if line.startswith('LICENSE = '): 663 if line.startswith('##LICENSE_PLACEHOLDER##'):
230 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) 664 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
231 break 665 break
232 666
233 if 'Classifier' in info: 667 if 'Classifier' in info:
234 existing_licenses = info.get('License', '') 668 license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
235 licenses = [] 669 if license:
236 for classifier in info['Classifier']: 670 info['License'] = license
237 if classifier in self.classifier_license_map:
238 license = self.classifier_license_map[classifier]
239 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
240 license = 'Apache-2.0'
241 elif license == 'GPL':
242 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
243 license = 'GPL-2.0'
244 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
245 license = 'GPL-3.0'
246 elif license == 'LGPL':
247 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
248 license = 'LGPL-2.1'
249 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
250 license = 'LGPL-2.0'
251 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
252 license = 'LGPL-3.0'
253 licenses.append(license)
254
255 if licenses:
256 info['License'] = ' & '.join(licenses)
257 671
258 # Map PKG-INFO & setup.py fields to bitbake variables 672 self.map_info_to_bbvar(info, extravalues)
259 for field, values in info.items():
260 if field in self.excluded_fields:
261 continue
262
263 if field not in self.bbvar_map:
264 continue
265
266 if isinstance(values, str):
267 value = values
268 else:
269 value = ' '.join(str(v) for v in values if v)
270
271 bbvar = self.bbvar_map[field]
272 if bbvar not in extravalues and value:
273 extravalues[bbvar] = value
274 673
275 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) 674 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
276 675
@@ -281,6 +680,7 @@ class PythonRecipeHandler(RecipeHandler):
281 lines_after.append('# The following configs & dependencies are from setuptools extras_require.') 680 lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
282 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') 681 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
283 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') 682 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
 683 lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.')
284 lines_after.append('#') 684 lines_after.append('#')
285 lines_after.append('# Uncomment this line to enable all the optional features.') 685 lines_after.append('# Uncomment this line to enable all the optional features.')
286 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) 686 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
@@ -301,7 +701,7 @@ class PythonRecipeHandler(RecipeHandler):
301 inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs)) 701 inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
302 lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These') 702 lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
303 lines_after.append('# upstream names may not correspond exactly to bitbake package names.') 703 lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
304 lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(inst_req_deps))) 704 lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))
305 705
306 if mapped_deps: 706 if mapped_deps:
307 name = info.get('Name') 707 name = info.get('Name')
@@ -313,7 +713,7 @@ class PythonRecipeHandler(RecipeHandler):
313 lines_after.append('') 713 lines_after.append('')
314 lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the') 714 lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
315 lines_after.append('# python sources, and might not be 100% accurate.') 715 lines_after.append('# python sources, and might not be 100% accurate.')
316 lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps)))) 716 lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))
317 717
318 unmapped_deps -= set(extensions) 718 unmapped_deps -= set(extensions)
319 unmapped_deps -= set(self.assume_provided) 719 unmapped_deps -= set(self.assume_provided)
@@ -326,275 +726,283 @@ class PythonRecipeHandler(RecipeHandler):
326 726
327 handled.append('buildsystem') 727 handled.append('buildsystem')
328 728
329 def get_pkginfo(self, pkginfo_fn): 729class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
330 msg = email.message_from_file(open(pkginfo_fn, 'r')) 730 """Base class to support PEP517 and PEP518
331 msginfo = {} 731
332 for field in msg.keys(): 732 PEP517 https://peps.python.org/pep-0517/#source-trees
333 values = msg.get_all(field) 733 PEP518 https://peps.python.org/pep-0518/#build-system-table
334 if len(values) == 1: 734 """
 335 msginfo[field] = values[0] 735 # bitbake currently supports the following backends
336 else: 736 build_backend_map = {
337 msginfo[field] = values 737 "setuptools.build_meta": "python_setuptools_build_meta",
338 return msginfo 738 "poetry.core.masonry.api": "python_poetry_core",
739 "flit_core.buildapi": "python_flit_core",
740 "hatchling.build": "python_hatchling",
741 "maturin": "python_maturin",
742 "mesonpy": "python_mesonpy",
743 }
339 744
340 def parse_setup_py(self, setupscript='./setup.py'): 745 # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
341 with codecs.open(setupscript) as f: 746 # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
342 info, imported_modules, non_literals, extensions = gather_setup_info(f) 747 # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
748 # keys from the "project" and "tool.poetry" sections are almost the same, except for HOMEPAGE, which is "homepage" for tool.poetry
749 # and "Homepage" for the "project" section, so keep both
750 bbvar_map = {
751 "name": "PN",
752 "version": "PV",
753 "Homepage": "HOMEPAGE",
754 "homepage": "HOMEPAGE",
755 "description": "SUMMARY",
756 "license": "LICENSE",
757 "dependencies": "RDEPENDS:${PN}",
758 "requires": "DEPENDS",
759 }
343 760
344 def _map(key): 761 replacements = [
345 key = key.replace('_', '-') 762 ("license", r" +$", ""),
346 key = key[0].upper() + key[1:] 763 ("license", r"^ +", ""),
347 if key in self.setup_parse_map: 764 ("license", r" ", "-"),
348 key = self.setup_parse_map[key] 765 ("license", r"^GNU-", ""),
349 return key 766 ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
767 ("license", r"^UNKNOWN$", ""),
768 # Remove currently unhandled version numbers from these variables
769 ("requires", r"\[[^\]]+\]$", ""),
770 ("requires", r"^([^><= ]+).*", r"\1"),
771 ("dependencies", r"\[[^\]]+\]$", ""),
772 ("dependencies", r"^([^><= ]+).*", r"\1"),
773 ]
350 774
351 # Naive mapping of setup() arguments to PKG-INFO field names 775 excluded_native_pkgdeps = [
352 for d in [info, non_literals]: 776 # already provided by python_setuptools_build_meta.bbclass
353 for key, value in list(d.items()): 777 "python3-setuptools-native",
354 if key is None: 778 "python3-wheel-native",
355 continue 779 # already provided by python_poetry_core.bbclass
356 new_key = _map(key) 780 "python3-poetry-core-native",
357 if new_key != key: 781 # already provided by python_flit_core.bbclass
358 del d[key] 782 "python3-flit-core-native",
359 d[new_key] = value 783 # already provided by python_mesonpy
784 "python3-meson-python-native",
785 ]
360 786
361 return info, 'setuptools' in imported_modules, non_literals, extensions 787 # add here a list of known and often used packages and the corresponding bitbake package
788 known_deps_map = {
789 "setuptools": "python3-setuptools",
790 "wheel": "python3-wheel",
791 "poetry-core": "python3-poetry-core",
792 "flit_core": "python3-flit-core",
793 "setuptools-scm": "python3-setuptools-scm",
794 "hatchling": "python3-hatchling",
795 "hatch-vcs": "python3-hatch-vcs",
796 "meson-python" : "python3-meson-python",
797 }
362 798
363 def get_setup_args_info(self, setupscript='./setup.py'): 799 def __init__(self):
364 cmd = ['python3', setupscript] 800 pass
365 info = {}
366 keys = set(self.bbvar_map.keys())
367 keys |= set(self.setuparg_list_fields)
368 keys |= set(self.setuparg_multi_line_values)
369 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
370 for index, keys in grouped_keys:
371 if index == (True, False):
372 # Splitlines output for each arg as a list value
373 for key in keys:
374 arg = self.setuparg_map.get(key, key.lower())
375 try:
376 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
377 except (OSError, subprocess.CalledProcessError):
378 pass
379 else:
380 info[key] = [l.rstrip() for l in arg_info.splitlines()]
381 elif index == (False, True):
382 # Entire output for each arg
383 for key in keys:
384 arg = self.setuparg_map.get(key, key.lower())
385 try:
386 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
387 except (OSError, subprocess.CalledProcessError):
388 pass
389 else:
390 info[key] = arg_info
391 else:
392 info.update(self.get_setup_byline(list(keys), setupscript))
393 return info
394 801
395 def get_setup_byline(self, fields, setupscript='./setup.py'): 802 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
396 info = {} 803 info = {}
804 metadata = {}
397 805
398 cmd = ['python3', setupscript] 806 if 'buildsystem' in handled:
399 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) 807 return False
400 try:
401 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
402 except (OSError, subprocess.CalledProcessError):
403 pass
404 else:
405 if len(fields) != len(info_lines):
406 logger.error('Mismatch between setup.py output lines and number of fields')
407 sys.exit(1)
408
409 for lineno, line in enumerate(info_lines):
410 line = line.rstrip()
411 info[fields[lineno]] = line
412 return info
413
414 def apply_info_replacements(self, info):
415 for variable, search, replace in self.replacements:
416 if variable not in info:
417 continue
418
419 def replace_value(search, replace, value):
420 if replace is None:
421 if re.search(search, value):
422 return None
423 else:
424 new_value = re.sub(search, replace, value)
425 if value != new_value:
426 return new_value
427 return value
428
429 value = info[variable]
430 if isinstance(value, str):
431 new_value = replace_value(search, replace, value)
432 if new_value is None:
433 del info[variable]
434 elif new_value != value:
435 info[variable] = new_value
436 elif hasattr(value, 'items'):
437 for dkey, dvalue in list(value.items()):
438 new_list = []
439 for pos, a_value in enumerate(dvalue):
440 new_value = replace_value(search, replace, a_value)
441 if new_value is not None and new_value != value:
442 new_list.append(new_value)
443
444 if value != new_list:
445 value[dkey] = new_list
446 else:
447 new_list = []
448 for pos, a_value in enumerate(value):
449 new_value = replace_value(search, replace, a_value)
450 if new_value is not None and new_value != value:
451 new_list.append(new_value)
452
453 if value != new_list:
454 info[variable] = new_list
455
456 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
457 if 'Package-dir' in setup_info:
458 package_dir = setup_info['Package-dir']
459 else:
460 package_dir = {}
461
462 class PackageDir(distutils.command.build_py.build_py):
463 def __init__(self, package_dir):
464 self.package_dir = package_dir
465
466 pd = PackageDir(package_dir)
467 to_scan = []
468 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
469 if 'Py-modules' in setup_info:
470 for module in setup_info['Py-modules']:
471 try:
472 package, module = module.rsplit('.', 1)
473 except ValueError:
474 package, module = '.', module
475 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
476 to_scan.append(module_path)
477 808
478 if 'Packages' in setup_info: 809 logger.debug("Trying pyproject.toml parser")
479 for package in setup_info['Packages']:
480 to_scan.append(pd.get_package_dir(package))
481 810
482 if 'Scripts' in setup_info: 811 # Check for non-zero size setup.py files
483 to_scan.extend(setup_info['Scripts']) 812 setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
813 for fn in setupfiles:
814 if os.path.getsize(fn):
815 break
484 else: 816 else:
485 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") 817 logger.debug("No pyproject.toml found")
486 818 return False
487 if not to_scan:
488 to_scan = ['.']
489
490 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
491 819
492 provided_packages = self.parse_pkgdata_for_python_packages() 820 setupscript = os.path.join(srctree, "pyproject.toml")
493 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
494 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
495 for dep in scanned_deps:
496 mapped = provided_packages.get(dep)
497 if mapped:
498 logger.debug('Mapped %s to %s' % (dep, mapped))
499 mapped_deps.add(mapped)
500 else:
501 logger.debug('Could not map %s' % dep)
502 unmapped_deps.add(dep)
503 return mapped_deps, unmapped_deps
504 821
505 def scan_python_dependencies(self, paths):
506 deps = set()
507 try: 822 try:
508 dep_output = self.run_command(['pythondeps', '-d'] + paths) 823 try:
509 except (OSError, subprocess.CalledProcessError): 824 import tomllib
510 pass 825 except ImportError:
511 else: 826 try:
512 for line in dep_output.splitlines(): 827 import tomli as tomllib
513 line = line.rstrip() 828 except ImportError:
514 dep, filename = line.split('\t', 1) 829 logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
515 if filename.endswith('/setup.py'): 830 return False
516 continue 831
517 deps.add(dep) 832 try:
833 with open(setupscript, "rb") as f:
834 config = tomllib.load(f)
835 except Exception:
836 logger.exception("Failed to parse pyproject.toml")
837 return False
838
839 build_backend = config["build-system"]["build-backend"]
840 if build_backend in self.build_backend_map:
841 classes.append(self.build_backend_map[build_backend])
842 else:
843 logger.error(
844 "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
845 % build_backend
846 )
847 return False
518 848
519 try: 849 licfile = ""
520 provides_output = self.run_command(['pythondeps', '-p'] + paths)
521 except (OSError, subprocess.CalledProcessError):
522 pass
523 else:
524 provides_lines = (l.rstrip() for l in provides_output.splitlines())
525 provides = set(l for l in provides_lines if l and l != 'setup')
526 deps -= provides
527 850
528 return deps 851 if build_backend == "poetry.core.masonry.api":
852 if "tool" in config and "poetry" in config["tool"]:
853 metadata = config["tool"]["poetry"]
854 else:
855 if "project" in config:
856 metadata = config["project"]
857
858 if metadata:
859 for field, values in metadata.items():
860 if field == "license":
 861 # For setuptools.build_meta and flit, license is a table
 862 # but for poetry license is a string
863 # for hatchling, both table (jsonschema) and string (iniconfig) have been used
864 if build_backend == "poetry.core.masonry.api":
865 value = values
866 else:
867 value = values.get("text", "")
868 if not value:
869 licfile = values.get("file", "")
870 continue
871 elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
872 # For poetry backend, "dependencies" section looks like:
873 # [tool.poetry.dependencies]
874 # requests = "^2.13.0"
875 # requests = { version = "^2.13.0", source = "private" }
876 # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
 877 # This class doesn't handle versions anyway, so we just take the dependency names here and construct a list
878 value = []
879 for k in values.keys():
880 value.append(k)
881 elif isinstance(values, dict):
882 for k, v in values.items():
883 info[k] = v
884 continue
885 else:
886 value = values
529 887
530 def parse_pkgdata_for_python_packages(self): 888 info[field] = value
531 suffixes = [t[0] for t in imp.get_suffixes()]
532 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
533 889
534 ldata = tinfoil.config_data.createCopy() 890 # Grab the license value before applying replacements
535 bb.parse.handle('classes/python3-dir.bbclass', ldata, True) 891 license_str = info.get("license", "").strip()
536 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
537 892
538 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') 893 if license_str:
539 python_dirs = [python_sitedir + os.sep, 894 for i, line in enumerate(lines_before):
540 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, 895 if line.startswith("##LICENSE_PLACEHOLDER##"):
541 os.path.dirname(python_sitedir) + os.sep] 896 lines_before.insert(
542 packages = {} 897 i, "# NOTE: License in pyproject.toml is: %s" % license_str
543 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): 898 )
544 files_info = None
545 with open(pkgdatafile, 'r') as f:
546 for line in f.readlines():
547 field, value = line.split(': ', 1)
548 if field == 'FILES_INFO':
549 files_info = ast.literal_eval(value)
550 break 899 break
551 else:
552 continue
553 900
554 for fn in files_info: 901 info["requires"] = config["build-system"]["requires"]
555 for suffix in suffixes: 902
556 if fn.endswith(suffix): 903 self.apply_info_replacements(info)
557 break 904
558 else: 905 if "classifiers" in info:
559 continue 906 license = self.handle_classifier_license(
907 info["classifiers"], info.get("license", "")
908 )
909 if license:
910 if licfile:
911 lines = []
912 md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
913 lines.append('LICENSE = "%s"' % license)
914 lines.append(
915 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
916 % (licfile, md5value)
917 )
918 lines.append("")
919
920 # Replace the placeholder so we get the values in the right place in the recipe file
921 try:
922 pos = lines_before.index("##LICENSE_PLACEHOLDER##")
923 except ValueError:
924 pos = -1
925 if pos == -1:
926 lines_before.extend(lines)
927 else:
928 lines_before[pos : pos + 1] = lines
560 929
561 if fn.startswith(dynload_dir + os.sep): 930 handled.append(("license", [license, licfile, md5value]))
562 if '/.debug/' in fn: 931 else:
563 continue 932 info["license"] = license
564 base = os.path.basename(fn)
565 provided = base.split('.', 1)[0]
566 packages[provided] = os.path.basename(pkgdatafile)
567 continue
568 933
569 for python_dir in python_dirs: 934 provided_packages = self.parse_pkgdata_for_python_packages()
570 if fn.startswith(python_dir): 935 provided_packages.update(self.known_deps_map)
571 relpath = fn[len(python_dir):] 936 native_mapped_deps, native_unmapped_deps = set(), set()
572 relstart, _, relremaining = relpath.partition(os.sep) 937 mapped_deps, unmapped_deps = set(), set()
573 if relstart.endswith('.egg'):
574 relpath = relremaining
575 base, _ = os.path.splitext(relpath)
576 938
577 if '/.debug/' in base: 939 if "requires" in info:
578 continue 940 for require in info["requires"]:
579 if os.path.basename(base) == '__init__': 941 mapped = provided_packages.get(require)
580 base = os.path.dirname(base)
581 base = base.replace(os.sep + os.sep, os.sep)
582 provided = base.replace(os.sep, '.')
583 packages[provided] = os.path.basename(pkgdatafile)
584 return packages
585 942
586 @classmethod 943 if mapped:
587 def run_command(cls, cmd, **popenargs): 944 logger.debug("Mapped %s to %s" % (require, mapped))
588 if 'stderr' not in popenargs: 945 native_mapped_deps.add(mapped)
589 popenargs['stderr'] = subprocess.STDOUT 946 else:
590 try: 947 logger.debug("Could not map %s" % require)
591 return subprocess.check_output(cmd, **popenargs).decode('utf-8') 948 native_unmapped_deps.add(require)
592 except OSError as exc: 949
593 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) 950 info.pop("requires")
594 raise 951
595 except subprocess.CalledProcessError as exc: 952 if native_mapped_deps != set():
596 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) 953 native_mapped_deps = {
597 raise 954 item + "-native" for item in native_mapped_deps
955 }
956 native_mapped_deps -= set(self.excluded_native_pkgdeps)
 957 if native_mapped_deps:
958 info["requires"] = " ".join(sorted(native_mapped_deps))
959
960 if native_unmapped_deps:
961 lines_after.append("")
962 lines_after.append(
963 "# WARNING: We were unable to map the following python package/module"
964 )
965 lines_after.append(
966 "# dependencies to the bitbake packages which include them:"
967 )
968 lines_after.extend(
969 "# {}".format(d) for d in sorted(native_unmapped_deps)
970 )
971
972 if "dependencies" in info:
973 for dependency in info["dependencies"]:
974 mapped = provided_packages.get(dependency)
975 if mapped:
976 logger.debug("Mapped %s to %s" % (dependency, mapped))
977 mapped_deps.add(mapped)
978 else:
979 logger.debug("Could not map %s" % dependency)
980 unmapped_deps.add(dependency)
981
982 info.pop("dependencies")
983
 984 if mapped_deps:
 985 info["dependencies"] = " ".join(sorted(mapped_deps))
987
988 if unmapped_deps:
989 lines_after.append("")
990 lines_after.append(
991 "# WARNING: We were unable to map the following python package/module"
992 )
993 lines_after.append(
994 "# runtime dependencies to the bitbake packages which include them:"
995 )
996 lines_after.extend(
997 "# {}".format(d) for d in sorted(unmapped_deps)
998 )
999
1000 self.map_info_to_bbvar(info, extravalues)
1001
1002 handled.append("buildsystem")
1003 except Exception:
1004 logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
1005 return False
598 1006
599 1007
600def gather_setup_info(fileobj): 1008def gather_setup_info(fileobj):
@@ -710,5 +1118,7 @@ def has_non_literals(value):
710 1118
711 1119
712def register_recipe_handlers(handlers): 1120def register_recipe_handlers(handlers):
713 # We need to make sure this is ahead of the makefile fallback handler 1121 # We need to make sure these are ahead of the makefile fallback handler
714 handlers.append((PythonRecipeHandler(), 70)) 1122 # and the pyproject.toml handler ahead of the setup.py handler
1123 handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
1124 handlers.append((PythonSetupPyRecipeHandler(), 70))
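The pyproject.toml handler above comes down to one decision: read the build-system table and map the declared backend onto a bbclass. A minimal stand-alone sketch of that detection step, assuming Python 3.11+ for the stdlib tomllib (the mapping mirrors build_backend_map above; detect_bbclass is an illustrative helper, not part of recipetool):

import tomllib

# Backend-to-bbclass map, copied from build_backend_map above
BACKEND_MAP = {
    "setuptools.build_meta": "python_setuptools_build_meta",
    "poetry.core.masonry.api": "python_poetry_core",
    "flit_core.buildapi": "python_flit_core",
    "hatchling.build": "python_hatchling",
    "maturin": "python_maturin",
    "mesonpy": "python_mesonpy",
}

def detect_bbclass(pyproject_path):
    # Parse pyproject.toml and return the bbclass to inherit, or None
    # when the declared backend is not one recipetool knows about
    with open(pyproject_path, "rb") as f:
        config = tomllib.load(f)
    backend = config.get("build-system", {}).get("build-backend", "")
    return BACKEND_MAP.get(backend)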
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
new file mode 100644
index 0000000000..a85a2f2786
--- /dev/null
+++ b/scripts/lib/recipetool/create_go.py
@@ -0,0 +1,777 @@
1# Recipe creation tool - go support plugin
2#
3# The code is based on golang internals. See the affected
4# methods for further reference and information.
5#
6# Copyright (C) 2023 Weidmueller GmbH & Co KG
7# Author: Lukas Funke <lukas.funke@weidmueller.com>
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import guess_license, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError, HTTPError
20
21import bb.utils
22import json
23import logging
24import os
25import re
26import subprocess
27import sys
28import shutil
29import tempfile
30import urllib.parse
31import urllib.request
32
33
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38
39tinfoil = None
40
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
49
50def tinfoil_init(instance):
51 global tinfoil
52 tinfoil = instance
53
54
55class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation"""
57
58 @staticmethod
59 def __ensure_go():
60 """Check if the 'go' command is available in the recipes"""
61 recipe = "go-native"
62 if not tinfoil.recipes_parsed:
63 tinfoil.parse_recipes()
64 try:
65 rd = tinfoil.parse_recipe(recipe)
66 except bb.providers.NoProvider:
67 bb.error(
68 "Nothing provides '%s' which is required for the build" % (recipe))
69 bb.note(
70 "You will likely need to add a layer that provides '%s'" % (recipe))
71 return None
72
73 bindir = rd.getVar('STAGING_BINDIR_NATIVE')
74 gopath = os.path.join(bindir, 'go')
75
76 if not os.path.exists(gopath):
77 tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
78
79 if not os.path.exists(gopath):
80 logger.error(
81 '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
82 return None
83
84 return bindir
85
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
99 # Some module paths are just redirects to github (or some other vcs
100 # host). Therefore, we check if this module path redirects to
101 # somewhere else
102 if resp.geturl() != url.geturl():
103 bb.debug(1, "%s is redirected to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s or %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
208 vcstype = m[vcs.type]
209 repourl = m[vcs.repo]
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = {}
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 srv = content[0][1].split()
237 self.__srv[srv[0]] = srv
238
239 def go_import(self, modulepath):
240 if modulepath in self.__srv:
241 srv = self.__srv[modulepath]
242 return GoImport(srv[0], srv[1], srv[2], None)
243 return None
244
245 url = url.geturl() + "?go-get=1"
246 req = urllib.request.Request(url)
247
248 try:
249 body = urllib.request.urlopen(req).read()
250 except HTTPError as http_err:
251 logger.warning(
252 "Unclean status when fetching page from [%s]: %s", url, str(http_err))
253 body = http_err.fp.read()
254 except URLError as url_err:
255 logger.warning(
256 "Failed to fetch page from [%s]: %s", url, str(url_err))
257 return None
258
259 parser = GoImportHTMLParser()
260 parser.feed(body.decode('utf-8'))
261 parser.close()
262
263 return parser.go_import(modulepath)
264
265 def __resolve_from_golang_proxy(self, modulepath, version):
266 """
267 Resolves repository data from golang proxy
268 """
269 url = urllib.parse.urlparse("https://proxy.golang.org/"
270 + modulepath
271 + "/@v/"
272 + version
273 + ".info")
274
275 # Transform url to lower case, golang proxy doesn't like mixed case
276 req = urllib.request.Request(url.geturl().lower())
277
278 try:
279 resp = urllib.request.urlopen(req)
280 except URLError as url_err:
281 logger.warning(
282 "Failed to fetch page from [%s]: %s", url, str(url_err))
283 return None
284
285 golang_proxy_res = resp.read().decode('utf-8')
286 modinfo = json.loads(golang_proxy_res)
287
288 if modinfo and 'Origin' in modinfo:
289 origin = modinfo['Origin']
290 _root_url = urllib.parse.urlparse(origin['URL'])
291
292 # We normalize the repo URL since we don't want the scheme in it
293 _subdir = origin['Subdir'] if 'Subdir' in origin else None
294 _root, _, _ = self.__split_path_version(modulepath)
295 if _subdir:
296 _root = _root[:-len(_subdir)].strip('/')
297
298 _commit = origin['Hash']
299 _vcs = origin['VCS']
300 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
301
302 return None
303
304 def __resolve_repository(self, modulepath):
305 """
306 Resolves src uri from go module-path
307 """
308 repodata = self.__resolve_repository_static(modulepath)
309 if not repodata or not repodata.url:
310 repodata = self.__resolve_repository_dynamic(modulepath)
311 if not repodata or not repodata.url:
312 logger.error(
313 "Could not resolve repository for module path '%s'" % modulepath)
314 # There is no way to recover from this
315 sys.exit(14)
316 if repodata:
317 logger.debug("Resolved download path for import '%s' => %s" % (
318 modulepath, repodata.url))
319 return repodata
320
321 def __split_path_version(self, path):
322 i = len(path)
323 dot = False
324 for j in range(i, 0, -1):
325 if path[j - 1] < '0' or path[j - 1] > '9':
326 break
327 if path[j - 1] == '.':
328 dot = True
329 break
330 i = j - 1
331
332 if i <= 1 or i == len(
333 path) or path[i - 1] != 'v' or path[i - 2] != '/':
334 return path, "", True
335
336 prefix, pathMajor = path[:i - 2], path[i - 2:]
337 if dot or len(
338 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
339 return path, "", False
340
341 return prefix, pathMajor, True
342
343 def __get_path_major(self, pathMajor):
344 if not pathMajor:
345 return ""
346
347 if pathMajor[0] != '/' and pathMajor[0] != '.':
348 logger.error(
349 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
350
351 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
352 pathMajor = pathMajor[:len("-unstable") - 2]
353
354 return pathMajor[1:]
355
356 def __build_coderepo(self, repo, path):
357 codedir = ""
358 pathprefix, pathMajor, _ = self.__split_path_version(path)
359 if repo.root == path:
360 pathprefix = path
361 elif path.startswith(repo.root):
362 codedir = pathprefix[len(repo.root):].strip('/')
363
364 pseudoMajor = self.__get_path_major(pathMajor)
365
366 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
367 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
368
369 return CodeRepo(path, repo.root, codedir,
370 pathMajor, pathprefix, pseudoMajor)
371
372 def __resolve_version(self, repo, path, version):
373 hash = None
374 coderoot = self.__build_coderepo(repo, path)
375
376 def vcs_fetch_all():
377 tmpdir = tempfile.mkdtemp()
378 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
379 bb.process.run(clone_cmd)
380 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
381 output, _ = bb.process.run(
382 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
383 bb.utils.prunedir(tmpdir)
384 return output.strip().split('\n')
385
386 def vcs_fetch_remote(tag):
387 # add * to grab ^{}
388 refs = {}
389 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
390 repo.url, tag)
391 output, _ = bb.process.run(ls_remote_cmd)
392 output = output.strip().split('\n')
393 for line in output:
394 f = line.split(maxsplit=1)
395 if len(f) != 2:
396 continue
397
398 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
399 if f[1].startswith(prefix):
400 refs[f[1][len(prefix):]] = f[0]
401
402 for key, hash in refs.items():
403 if key.endswith(r"^{}"):
404 refs[key.strip(r"^{}")] = hash
405
406 return refs[tag]
407
408 m_pseudo_semver = re_pseudo_semver.match(version)
409
410 if m_pseudo_semver:
411 remote_refs = vcs_fetch_all()
412 short_commit = m_pseudo_semver.group('commithash')
413 for l in remote_refs:
414 r = l.split(maxsplit=1)
415 sha1 = r[0] if len(r) else None
416 if not sha1:
417 logger.error(
418 "Ups: could not resolve abbref commit for %s" % short_commit)
419
420 elif sha1.startswith(short_commit):
421 hash = sha1
422 break
423 else:
424 m_semver = re_semver.match(version)
425 if m_semver:
426
427 def get_sha1_remote(re):
428 rsha1 = None
429 for line in remote_refs:
430 # Split lines of the following format:
431 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
432 lineparts = line.split(maxsplit=1)
433 sha1 = lineparts[0] if len(lineparts) else None
434 refstring = lineparts[1] if len(
435 lineparts) == 2 else None
436 if refstring:
437 # Normalize tag string and split in case of multiple
438 # refs, e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
439 refs = refstring.strip('(), ').split(',')
440 for ref in refs:
441 if re.match(ref.strip()):
442 rsha1 = sha1
443 return rsha1
444
445 semver = "v" + m_semver.group('major') + "."\
446 + m_semver.group('minor') + "."\
447 + m_semver.group('patch') \
448 + (("-" + m_semver.group('prerelease'))
449 if m_semver.group('prerelease') else "")
450
451 tag = os.path.join(
452 coderoot.codeDir, semver) if coderoot.codeDir else semver
453
454 # probe tag using 'ls-remote', which is faster than fetching
455 # complete history
456 hash = vcs_fetch_remote(tag)
457 if not hash:
458 # backup: fetch complete history
459 remote_refs = vcs_fetch_all()
460 hash = get_sha1_remote(
461 re.compile(fr"(tag:|HEAD ->) ({tag})"))
462
463 logger.debug(
464 "Resolving commit for tag '%s' -> '%s'", tag, hash)
465 return hash
466
467 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
468 """Generate SRC_URI functions for go imports"""
469
470 logger.info("Resolving repository for module %s", path)
471 # First try to resolve repo and commit from golang proxy
472 # Most info is already there and we don't have to go through the
473 # repository or even perform the version resolve magic
474 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
475 if golang_proxy_info:
476 repo = golang_proxy_info[0]
477 commit = golang_proxy_info[1]
478 else:
479 # Fallback
480 # Resolve repository by 'hand'
481 repo = self.__resolve_repository(path)
482 commit = self.__resolve_version(repo, path, version)
483
484 url = urllib.parse.urlparse(repo.url)
485 repo_url = url.netloc + url.path
486
487 coderoot = self.__build_coderepo(repo, path)
488
489 inline_fcn = "${@go_src_uri("
490 inline_fcn += f"'{repo_url}','{version}'"
491 if repo_url != path:
492 inline_fcn += f",path='{path}'"
493 if coderoot.codeDir:
494 inline_fcn += f",subdir='{coderoot.codeDir}'"
495 if repo.vcs != 'git':
496 inline_fcn += f",vcs='{repo.vcs}'"
497 if replaces:
498 inline_fcn += f",replaces='{replaces}'"
499 if coderoot.pathMajor:
500 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
501 inline_fcn += ")}"
502
503 return inline_fcn, commit
504
505 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
506
507 import re
508 src_uris = []
509 src_revs = []
510
511 def generate_src_rev(path, version, commithash):
512 src_rev = f"# {path}@{version} => {commithash}\n"
513 # Oops... maybe someone manipulated the source repository and the
514 # version or commit could not be resolved. This is a sign of
515 # a) the supply chain was manipulated (bad)
516 # b) the version-resolving implementation no longer
517 # works (less bad)
518 if not commithash:
519 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
520 src_rev += f"#!!! Could not resolve version !!!\n"
521 src_rev += f"#!!! Possible supply chain attack !!!\n"
522 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
523 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
524
525 return src_rev
526
527 # we first go over the replacement list, because we are essentially
528 # interested only in the replaced path
529 if go_mod['Replace']:
530 for replacement in go_mod['Replace']:
531 oldpath = replacement['Old']['Path']
532 path = replacement['New']['Path']
533 version = ''
534 if 'Version' in replacement['New']:
535 version = replacement['New']['Version']
536
537 if os.path.exists(os.path.join(srctree, path)):
538 # the module refers to the local path, remove it from requirement list
539 # because it's a local module
540 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
541 else:
542 # Replace the path and the version, so we don't iterate the replacement list anymore
543 for require in go_mod['Require']:
544 if require['Path'] == oldpath:
545 require.update({'Path': path, 'Version': version})
546 break
547
548 for require in go_mod['Require']:
549 path = require['Path']
550 version = require['Version']
551
552 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
553 path, version)
554 src_uris.append(inline_fcn)
555 src_revs.append(generate_src_rev(path, version, commithash))
556
557 # strip version part from module URL /vXX
558 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
559 pn, _ = determine_from_url(baseurl)
560 go_mods_basename = "%s-modules.inc" % pn
561
562 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
563 with open(go_mods_filename, "w") as f:
564 # We introduce this indirection to make the tests a little easier
565 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
566 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
567 for uri in src_uris:
568 f.write(" " + uri + " \\\n")
569 f.write("\"\n\n")
570 for rev in src_revs:
571 f.write(rev + "\n")
572
573 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
574
575 def __go_run_cmd(self, cmd, cwd, d):
576 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
577 shell=True, cwd=cwd)
578
579 def __go_native_version(self, d):
580 stdout, _ = self.__go_run_cmd("go version", None, d)
581 m = re.match(r".*\sgo((\d+)\.(\d+)\.(\d+))\s([\w\/]*)", stdout)
582 major = int(m.group(2))
583 minor = int(m.group(3))
584 patch = int(m.group(4))
585
586 return major, minor, patch
587
588 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
589
590 patchfilename = "go.mod.patch"
591 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
592 d)
593 self.__go_run_cmd("go mod tidy -go=%d.%d" %
594 (go_native_version_major, go_native_version_minor), srctree, d)
595 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
596
597 # Create patch in order to upgrade go version
598 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
599 # Restore original state
600 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
601
602 go_mod = json.loads(stdout)
603 tmpfile = os.path.join(localfilesdir, patchfilename)
604 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
605
606 extravalues['extrafiles'][patchfilename] = tmpfile
607
608 return go_mod, patchfilename
609
610 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
611 # Perform vendoring to retrieve the correct modules.txt
612 tmp_vendor_dir = tempfile.mkdtemp()
613
614 # -v causes go to print modules.txt to stderr
615 _, stderr = self.__go_run_cmd(
616 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
617
618 modules_txt_basename = "modules.txt"
619 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
620 with open(modules_txt_filename, "w") as f:
621 f.write(stderr)
622
623 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
624
625 licenses = []
626 lic_files_chksum = []
627 licvalues = guess_license(tmp_vendor_dir, d)
628 shutil.rmtree(tmp_vendor_dir)
629
630 if licvalues:
631 for licvalue in licvalues:
632 license = licvalue[0]
633 lics = tidy_licenses(fixup_license(license))
634 lics = [lic for lic in lics if lic not in licenses]
635 if len(lics):
636 licenses.extend(lics)
637 lic_files_chksum.append(
638 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
639
640 # strip version part from module URL /vXX
641 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
642 pn, _ = determine_from_url(baseurl)
643 licenses_basename = "%s-licenses.inc" % pn
644
645 licenses_filename = os.path.join(localfilesdir, licenses_basename)
646 with open(licenses_filename, "w") as f:
647 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
648 ' & '.join(sorted(licenses, key=str.casefold)))
649 # We introduce this indirection to make the tests a little easier
650 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
651 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
652 for lic in lic_files_chksum:
653 f.write(" " + lic + " \\\n")
654 f.write("\"\n")
655
656 extravalues['extrafiles'][licenses_basename] = licenses_filename
657
658 def process(self, srctree, classes, lines_before,
659 lines_after, handled, extravalues):
660
661 if 'buildsystem' in handled:
662 return False
663
664 files = RecipeHandler.checkfiles(srctree, ['go.mod'])
665 if not files:
666 return False
667
668 d = bb.data.createCopy(tinfoil.config_data)
669 go_bindir = self.__ensure_go()
670 if not go_bindir:
671 sys.exit(14)
672
673 d.prependVar('PATH', '%s:' % go_bindir)
674 handled.append('buildsystem')
675 classes.append("go-vendor")
676
677 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
678
679 go_mod = json.loads(stdout)
680 go_import = go_mod['Module']['Path']
681 go_version_match = re.match(r"([0-9]+)\.([0-9]+)", go_mod['Go'])
682 go_version_major = int(go_version_match.group(1))
683 go_version_minor = int(go_version_match.group(2))
684 src_uris = []
685
686 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
687 extravalues.setdefault('extrafiles', {})
688
689 # Use an explicit name determined from the module name because it
690 # might differ from the actual URL for replaced modules
691 # strip version part from module URL /vXX
692 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
693 pn, _ = determine_from_url(baseurl)
694
695 # go.mod files with version < 1.17 may not include all indirect
696 # dependencies. Thus, we have to upgrade the go version.
697 if go_version_major == 1 and go_version_minor < 17:
698 logger.warning(
699 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
700 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
701 extravalues, d)
702 src_uris.append(
703 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
704
705 # Check whether the module is vendored. If so, we have nothing to do.
706 # Otherwise we gather all dependencies and add them to the recipe
707 if not os.path.exists(os.path.join(srctree, "vendor")):
708
709 # Write additional $BPN-modules.inc file
710 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
711 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
712 lines_before.append("require %s-licenses.inc" % (pn))
713
714 self.__rewrite_src_uri(lines_before, ["file://modules.txt"])
715
716 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
717 lines_before.append("require %s-modules.inc" % (pn))
718
719 # Do generic license handling
720 handle_license_vars(srctree, lines_before, handled, extravalues, d)
721 self.__rewrite_lic_uri(lines_before)
722
723 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
724 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
725
726 def __update_lines_before(self, updated, newlines, lines_before):
727 if updated:
728 del lines_before[:]
729 for line in newlines:
730 # Hack to avoid newlines that edit_metadata inserts
731 if line.endswith('\n'):
732 line = line[:-1]
733 lines_before.append(line)
734 return updated
735
736 def __rewrite_lic_uri(self, lines_before):
737
738 def varfunc(varname, origvalue, op, newlines):
739 if varname == 'LIC_FILES_CHKSUM':
740 new_licenses = []
741 licenses = origvalue.split('\\')
742 for license in licenses:
743 if not license:
744 logger.warning("No license file was detected for the main module!")
745 # the license list of the main recipe must be empty
746 # this can happen for example in case of CLOSED license
747 # Fall through to complete recipe generation
748 continue
749 license = license.strip()
750 uri, chksum = license.split(';', 1)
751 url = urllib.parse.urlparse(uri)
752 new_uri = os.path.join(
753 url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
754 new_licenses.append(new_uri)
755
756 return new_licenses, None, -1, True
757 return origvalue, None, 0, True
758
759 updated, newlines = bb.utils.edit_metadata(
760 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
761 return self.__update_lines_before(updated, newlines, lines_before)
762
763 def __rewrite_src_uri(self, lines_before, additional_uris = []):
764
765 def varfunc(varname, origvalue, op, newlines):
766 if varname == 'SRC_URI':
767 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
768 src_uri.extend(additional_uris)
769 return src_uri, None, -1, True
770 return origvalue, None, 0, True
771
772 updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
773 return self.__update_lines_before(updated, newlines, lines_before)
774
775
776def register_recipe_handlers(handlers):
777 handlers.append((GoRecipeHandler(), 60))
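Version resolution in create_go.py hinges on the two regular expressions defined near the top of the file: a pseudo-version embeds a 14-digit UTC timestamp plus an abbreviated commit hash, while a plain semantic version is resolved through tags. A small illustrative sketch of that classification (the patterns are copied verbatim from the file above; the sample version strings are made up):

import re

# Patterns copied from create_go.py above
re_pseudo_semver = re.compile(
    r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
re_semver = re.compile(
    r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")

for version in ("v0.0.0-20210112200429-01de73cf58bd", "v1.5.2-rc1"):
    m = re_pseudo_semver.match(version)
    if m:
        # Pseudo-version: the abbreviated hash gets matched against 'git log --all'
        print(version, "-> commit prefix", m.group("commithash"))
        continue
    m = re_semver.match(version)
    if m:
        # Plain semver: the corresponding tag is probed with 'git ls-remote'
        tag = "v%s.%s.%s" % (m.group("major"), m.group("minor"), m.group("patch"))
        if m.group("prerelease"):
            tag += "-" + m.group("prerelease")
        print(version, "-> tag", tag)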
diff --git a/scripts/lib/recipetool/create_kmod.py b/scripts/lib/recipetool/create_kmod.py
index 85b5c48e53..cc00106961 100644
--- a/scripts/lib/recipetool/create_kmod.py
+++ b/scripts/lib/recipetool/create_kmod.py
@@ -113,7 +113,7 @@ class KernelModuleRecipeHandler(RecipeHandler):
113 kdirpath, _ = check_target(compile_lines, install=False) 113 kdirpath, _ = check_target(compile_lines, install=False)
114 114
115 if manual_install or not install_lines: 115 if manual_install or not install_lines:
116 lines_after.append('EXTRA_OEMAKE_append_task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"') 116 lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
117 elif install_target and install_target != 'modules_install': 117 elif install_target and install_target != 'modules_install':
118 lines_after.append('MODULES_INSTALL_TARGET = "install"') 118 lines_after.append('MODULES_INSTALL_TARGET = "install"')
119 119
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 2bcae91dfa..113a89f6a6 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -6,16 +6,20 @@
6"""Recipe creation tool - npm module support plugin""" 6"""Recipe creation tool - npm module support plugin"""
7 7
8import json 8import json
9import logging
9import os 10import os
10import re 11import re
11import sys 12import sys
12import tempfile 13import tempfile
13import bb 14import bb
14from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package
15from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
16from recipetool.create import RecipeHandler 18from recipetool.create import RecipeHandler
19from recipetool.create import get_license_md5sums
17from recipetool.create import guess_license 20from recipetool.create import guess_license
18from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
22logger = logging.getLogger('recipetool')
19 23
20TINFOIL = None 24TINFOIL = None
21 25
@@ -28,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler):
28 """Class to handle the npm recipe creation""" 32 """Class to handle the npm recipe creation"""
29 33
30 @staticmethod 34 @staticmethod
31 def _npm_name(name):
32 """Generate a Yocto friendly npm name"""
33 name = re.sub("/", "-", name)
34 name = name.lower()
35 name = re.sub(r"[^\-a-z0-9]", "", name)
36 name = name.strip("-")
37 return name
38
39 @staticmethod
40 def _get_registry(lines): 35 def _get_registry(lines):
41 """Get the registry value from the 'npm://registry' url""" 36 """Get the registry value from the 'npm://registry' url"""
42 registry = None 37 registry = None
@@ -118,23 +113,32 @@ class NpmRecipeHandler(RecipeHandler):
118 licfiles = [] 113 licfiles = []
119 packages = {} 114 packages = {}
120 115
121 def _licfiles_append(licfile):
122 """Append 'licfile' to the license files list"""
123 licfilepath = os.path.join(srctree, licfile)
124 licmd5 = bb.utils.md5_file(licfilepath)
125 licfiles.append("file://%s;md5=%s" % (licfile, licmd5))
126
127 # Handle the parent package 116 # Handle the parent package
128 _licfiles_append("package.json")
129 packages["${PN}"] = "" 117 packages["${PN}"] = ""
130 118
119 def _licfiles_append_fallback_readme_files(destdir):
120 """Append README files as fallback to license files if a license files is missing"""
121
122 fallback = True
123 readmes = []
124 basedir = os.path.join(srctree, destdir)
125 for fn in os.listdir(basedir):
126 upper = fn.upper()
127 if upper.startswith("README"):
128 fullpath = os.path.join(basedir, fn)
129 readmes.append(fullpath)
130 if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper:
131 fallback = False
132 if fallback:
133 for readme in readmes:
134 licfiles.append(os.path.relpath(readme, srctree))
135
131 # Handle the dependencies 136 # Handle the dependencies
132 def _handle_dependency(name, params, deptree): 137 def _handle_dependency(name, params, destdir):
133 suffix = "-".join([self._npm_name(dep) for dep in deptree]) 138 deptree = destdir.split('node_modules/')
134 destdirs = [os.path.join("node_modules", dep) for dep in deptree] 139 suffix = "-".join([npm_package(dep) for dep in deptree])
135 destdir = os.path.join(*destdirs) 140 packages["${PN}" + suffix] = destdir
136 _licfiles_append(os.path.join(destdir, "package.json")) 141 _licfiles_append_fallback_readme_files(destdir)
137 packages["${PN}-" + suffix] = destdir
138 142
139 with open(shrinkwrap_file, "r") as f: 143 with open(shrinkwrap_file, "r") as f:
140 shrinkwrap = json.load(f) 144 shrinkwrap = json.load(f)
@@ -142,6 +146,23 @@ class NpmRecipeHandler(RecipeHandler):
142 foreach_dependencies(shrinkwrap, _handle_dependency, dev) 146 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
143 147
144 return licfiles, packages 148 return licfiles, packages
149
150 # Handle the peer dependencies
151 def _handle_peer_dependency(self, shrinkwrap_file):
152 """Check if package has peer dependencies and show warning if it is the case"""
153 with open(shrinkwrap_file, "r") as f:
154 shrinkwrap = json.load(f)
155
156 packages = shrinkwrap.get("packages", {})
157 peer_deps = packages.get("", {}).get("peerDependencies", {})
158
159 for peer_dep in peer_deps:
160 peer_dep_yocto_name = npm_package(peer_dep)
 161 bb.warn(peer_dep + " is a peer dependency of the current package. " +
 162 "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool"
163 % peer_dep_yocto_name)
164
165
145 166
146 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 167 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
147 """Handle the npm recipe creation""" 168 """Handle the npm recipe creation"""
@@ -160,7 +181,7 @@ class NpmRecipeHandler(RecipeHandler):
160 if "name" not in data or "version" not in data: 181 if "name" not in data or "version" not in data:
161 return False 182 return False
162 183
163 extravalues["PN"] = self._npm_name(data["name"]) 184 extravalues["PN"] = npm_package(data["name"])
164 extravalues["PV"] = data["version"] 185 extravalues["PV"] = data["version"]
165 186
166 if "description" in data: 187 if "description" in data:
@@ -229,7 +250,7 @@ class NpmRecipeHandler(RecipeHandler):
229 value = origvalue.replace("version=" + data["version"], "version=${PV}") 250 value = origvalue.replace("version=" + data["version"], "version=${PV}")
230 value = value.replace("version=latest", "version=${PV}") 251 value = value.replace("version=latest", "version=${PV}")
231 values = [line.strip() for line in value.strip('\n').splitlines()] 252 values = [line.strip() for line in value.strip('\n').splitlines()]
232 if "dependencies" in shrinkwrap: 253 if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
233 values.append(url_recipe) 254 values.append(url_recipe)
234 return values, None, 4, False 255 return values, None, 4, False
235 256
@@ -246,12 +267,42 @@ class NpmRecipeHandler(RecipeHandler):
246 267
247 bb.note("Handling licences ...") 268 bb.note("Handling licences ...")
248 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 269 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)
249 extravalues["LIC_FILES_CHKSUM"] = licfiles 270
250 split_pkg_licenses(guess_license(srctree, d), packages, lines_after, []) 271 def _guess_odd_license(licfiles):
272 import bb
273
274 md5sums = get_license_md5sums(d, linenumbers=True)
275
276 chksums = []
277 licenses = []
278 for licfile in licfiles:
279 f = os.path.join(srctree, licfile)
280 md5value = bb.utils.md5_file(f)
281 (license, beginline, endline, md5) = md5sums.get(md5value,
282 (None, "", "", ""))
283 if not license:
284 license = "Unknown"
 285 logger.info("Please add the following line for '%s' to "
286 "'lib/recipetool/licenses.csv' and replace `Unknown`, "
287 "`X`, `Y` and `MD5` with the license, begin line, "
288 "end line and partial MD5 checksum:\n" \
289 "%s,Unknown,X,Y,MD5" % (licfile, md5value))
290 chksums.append("file://%s%s%s;md5=%s" % (licfile,
291 ";beginline=%s" % (beginline) if beginline else "",
292 ";endline=%s" % (endline) if endline else "",
293 md5 if md5 else md5value))
294 licenses.append((license, licfile, md5value))
295 return (licenses, chksums)
296
297 (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
298 split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
251 299
252 classes.append("npm") 300 classes.append("npm")
253 handled.append("buildsystem") 301 handled.append("buildsystem")
254 302
303 # Check if package has peer dependencies and inform the user
304 self._handle_peer_dependency(shrinkwrap_file)
305
255 return True 306 return True
256 307
257def register_recipe_handlers(handlers): 308def register_recipe_handlers(handlers):
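The removed _npm_name helper now lives in bitbake as bb.fetch2.npm.npm_package, which the handler imports instead of carrying its own copy. For reference, a sketch of the normalization rules the removed helper applied (and which the replacement is assumed to preserve):

import re

def npm_name(name):
    # Same rules as the _npm_name helper removed above: a Yocto-friendly
    # package name is lowercase, with '/' turned into '-' and any other
    # character outside [-a-z0-9] dropped
    name = re.sub("/", "-", name)
    name = name.lower()
    name = re.sub(r"[^\-a-z0-9]", "", name)
    return name.strip("-")

# e.g. the scoped module '@babel/core' becomes 'babel-core'
print(npm_name("@babel/core"))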
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
new file mode 100644
index 0000000000..80851111b3
--- /dev/null
+++ b/scripts/lib/recipetool/licenses.csv
@@ -0,0 +1,37 @@
10636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
318810669f13b87348459e611d31ab760,GPL-2.0-only
4252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
52d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
63214f080875748938ba060314b4f727d,LGPL-2.0-only
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
8393a5ca445f6965873eca0259a17f833,GPL-2.0-only
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
114325afd396febcb659c36b49533135d4,GPL-2.0-only
124fbd65380cdd255951079008b364516c,LGPL-2.1-only
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
1559530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
165f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
176a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
18751419260aa954499f7abaabaa882bbe,GPL-2.0-only
197fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
208ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
2194d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
229ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
239f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
24a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
25b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
26bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
28c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
29d32239bcb673463ab874e80d47fae504,GPL-3.0-only
30d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
31d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
32db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
33eb723b61539feef013de476e68b5c50a,GPL-2.0-only
34ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
35f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
36fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
37fbc093901857fcd118f065f900982c24,LGPL-2.1-only
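Each row of licenses.csv pairs the MD5 checksum of a (possibly partial) license text with its SPDX identifier; _guess_odd_license in create_npm.py consults these checksums via get_license_md5sums and falls back to "Unknown" with a hint to extend the CSV. A simplified stand-alone sketch of that lookup, where hashlib stands in for bb.utils.md5_file and the begin/end-line columns recipetool can also carry are omitted:

import csv
import hashlib

def load_license_md5sums(csv_path):
    # Each row of licenses.csv is "md5,SPDX-identifier"
    with open(csv_path, newline="") as f:
        return {md5: spdx for md5, spdx in csv.reader(f)}

def guess_license_file(licfile, md5sums):
    # Hash the candidate license file and look it up, defaulting to
    # "Unknown" as _guess_odd_license does above
    with open(licfile, "rb") as f:
        md5value = hashlib.md5(f.read()).hexdigest()
    return md5sums.get(md5value, "Unknown"), md5value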
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
index f8e2ee75fb..b5ad335cae 100644
--- a/scripts/lib/recipetool/setvar.py
+++ b/scripts/lib/recipetool/setvar.py
@@ -49,6 +49,7 @@ def setvar(args):
49 for patch in patches: 49 for patch in patches:
50 for line in patch: 50 for line in patch:
51 sys.stdout.write(line) 51 sys.stdout.write(line)
52 tinfoil.modified_files()
52 return 0 53 return 0
53 54
54 55
diff --git a/scripts/lib/resulttool/log.py b/scripts/lib/resulttool/log.py
index eb3927ec82..15148ca288 100644
--- a/scripts/lib/resulttool/log.py
+++ b/scripts/lib/resulttool/log.py
@@ -28,12 +28,10 @@ def show_reproducible(result, reproducible, logger):
28def log(args, logger): 28def log(args, logger):
29 results = resultutils.load_resultsdata(args.source) 29 results = resultutils.load_resultsdata(args.source)
30 30
31 ptest_count = sum(1 for _, _, _, r in resultutils.test_run_results(results) if 'ptestresult.sections' in r)
32 if ptest_count > 1 and not args.prepend_run:
33 print("%i ptest sections found. '--prepend-run' is required" % ptest_count)
34 return 1
35
36 for _, run_name, _, r in resultutils.test_run_results(results): 31 for _, run_name, _, r in resultutils.test_run_results(results):
32 if args.list_ptest:
33 print('\n'.join(sorted(r['ptestresult.sections'].keys())))
34
37 if args.dump_ptest: 35 if args.dump_ptest:
38 for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']: 36 for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']:
39 if sectname in r: 37 if sectname in r:
@@ -48,6 +46,9 @@ def log(args, logger):
48 46
49 os.makedirs(dest_dir, exist_ok=True) 47 os.makedirs(dest_dir, exist_ok=True)
50 dest = os.path.join(dest_dir, '%s.log' % name) 48 dest = os.path.join(dest_dir, '%s.log' % name)
49 if os.path.exists(dest):
50 print("Overlapping ptest logs found, skipping %s. The '--prepend-run' option would avoid this" % name)
51 continue
51 print(dest) 52 print(dest)
52 with open(dest, 'w') as f: 53 with open(dest, 'w') as f:
53 f.write(logdata) 54 f.write(logdata)
@@ -86,6 +87,8 @@ def register_commands(subparsers):
86 parser.set_defaults(func=log) 87 parser.set_defaults(func=log)
87 parser.add_argument('source', 88 parser.add_argument('source',
88 help='the results file/directory/URL to import') 89 help='the results file/directory/URL to import')
90 parser.add_argument('--list-ptest', action='store_true',
91 help='list the ptest test names')
89 parser.add_argument('--ptest', action='append', default=[], 92 parser.add_argument('--ptest', action='append', default=[],
90 help='show logs for a ptest') 93 help='show logs for a ptest')
91 parser.add_argument('--dump-ptest', metavar='DIR', 94 parser.add_argument('--dump-ptest', metavar='DIR',
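
With the up-front ptest-count check removed, overlap handling moves to write
time: a log whose destination file already exists is skipped with a warning
instead of aborting the whole dump. A standalone sketch of that behaviour,
with a simplified data layout (a dict of run name to {ptest name: log text}):

    import os

    def dump_logs(runs, dest_dir, prepend_run=False):
        for run_name, sections in runs.items():
            out_dir = os.path.join(dest_dir, run_name) if prepend_run else dest_dir
            os.makedirs(out_dir, exist_ok=True)
            for name, logdata in sections.items():
                dest = os.path.join(out_dir, '%s.log' % name)
                if os.path.exists(dest):
                    # mirror the patch: keep the first log, skip later ones
                    print("Overlapping ptest logs found, skipping %s" % name)
                    continue
                with open(dest, 'w') as f:
                    f.write(logdata)
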
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py
index 9f952951b3..10e7d13841 100644
--- a/scripts/lib/resulttool/regression.py
+++ b/scripts/lib/resulttool/regression.py
@@ -7,15 +7,209 @@
 #
 
 import resulttool.resultutils as resultutils
-import json
 
 from oeqa.utils.git import GitRepo
 import oeqa.utils.gitarchive as gitarchive
 
-def compare_result(logger, base_name, target_name, base_result, target_result):
+METADATA_MATCH_TABLE = {
+    "oeselftest": "OESELFTEST_METADATA"
+}
+
+OESELFTEST_METADATA_GUESS_TABLE = {
+    "trigger-build-posttrigger": {
+        "run_all_tests": False,
+        "run_tests": ["buildoptions.SourceMirroring.test_yocto_source_mirror"],
+        "skips": None,
+        "machine": None,
+        "select_tags": None,
+        "exclude_tags": None
+    },
+    "reproducible": {
+        "run_all_tests": False,
+        "run_tests": ["reproducible"],
+        "skips": None,
+        "machine": None,
+        "select_tags": None,
+        "exclude_tags": None
+    },
+    "arch-qemu-quick": {
+        "run_all_tests": True,
+        "run_tests": None,
+        "skips": None,
+        "machine": None,
+        "select_tags": ["machine"],
+        "exclude_tags": None
+    },
+    "arch-qemu-full-x86-or-x86_64": {
+        "run_all_tests": True,
+        "run_tests": None,
+        "skips": None,
+        "machine": None,
+        "select_tags": ["machine", "toolchain-system"],
+        "exclude_tags": None
+    },
+    "arch-qemu-full-others": {
+        "run_all_tests": True,
+        "run_tests": None,
+        "skips": None,
+        "machine": None,
+        "select_tags": ["machine", "toolchain-user"],
+        "exclude_tags": None
+    },
+    "selftest": {
+        "run_all_tests": True,
+        "run_tests": None,
+        "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"],
+        "machine": None,
+        "select_tags": None,
+        "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
+    },
+    "bringup": {
+        "run_all_tests": True,
+        "run_tests": None,
+        "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"],
+        "machine": None,
+        "select_tags": None,
+        "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
+    }
+}
+
+STATUS_STRINGS = {
+    "None": "No matching test result"
+}
+
+REGRESSIONS_DISPLAY_LIMIT = 50
+
+MISSING_TESTS_BANNER = "-------------------------- Missing tests --------------------------"
+ADDITIONAL_DATA_BANNER = "--------------------- Matches and improvements --------------------"
+
+def test_has_at_least_one_matching_tag(test, tag_list):
+    return "oetags" in test and any(oetag in tag_list for oetag in test["oetags"])
+
+def all_tests_have_at_least_one_matching_tag(results, tag_list):
+    return all(test_has_at_least_one_matching_tag(test_result, tag_list) or test_name.startswith("ptestresult") for (test_name, test_result) in results.items())
+
+def any_test_have_any_matching_tag(results, tag_list):
+    return any(test_has_at_least_one_matching_tag(test, tag_list) for test in results.values())
+
+def have_skipped_test(result, test_prefix):
+    return all(result[test]['status'] == "SKIPPED" for test in result if test.startswith(test_prefix))
+
+def have_all_tests_skipped(result, test_prefixes_list):
+    return all(have_skipped_test(result, test_prefix) for test_prefix in test_prefixes_list)
+
+def guess_oeselftest_metadata(results):
+    """
+    When an oeselftest result lacks OESELFTEST_METADATA, try to guess it from the results content.
+    Check the results for specific values (absence/presence of oetags, number and names of executed tests...)
+    and, if they match one of the known autobuilder configurations, apply the guessed OESELFTEST_METADATA
+    so that proper test filtering is possible.
+    This guessing process is tightly coupled to config.json in the autobuilder. It should trigger less and
+    less over time, as new tests have OESELFTEST_METADATA properly appended at test reporting time.
+    """
+
+    if len(results) == 1 and "buildoptions.SourceMirroring.test_yocto_source_mirror" in results:
+        return OESELFTEST_METADATA_GUESS_TABLE['trigger-build-posttrigger']
+    elif all(result.startswith("reproducible") for result in results):
+        return OESELFTEST_METADATA_GUESS_TABLE['reproducible']
+    elif all_tests_have_at_least_one_matching_tag(results, ["machine"]):
+        return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-quick']
+    elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-system"]):
+        return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-x86-or-x86_64']
+    elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-user"]):
+        return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-others']
+    elif not any_test_have_any_matching_tag(results, ["machine", "toolchain-user", "toolchain-system"]):
+        if have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"]):
+            return OESELFTEST_METADATA_GUESS_TABLE['selftest']
+        elif have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"]):
+            return OESELFTEST_METADATA_GUESS_TABLE['bringup']
+
+    return None
+
+
+def metadata_matches(base_configuration, target_configuration):
+    """
+    For the passed base and target, check the test type. If the test type matches one of
+    the properties described in METADATA_MATCH_TABLE, compare the corresponding metadata
+    if it is present in base. Return True if the metadata matches, or if base lacks some
+    data (either TEST_TYPE or the corresponding metadata).
+    """
+    test_type = base_configuration.get('TEST_TYPE')
+    if test_type not in METADATA_MATCH_TABLE:
+        return True
+
+    metadata_key = METADATA_MATCH_TABLE.get(test_type)
+    if target_configuration.get(metadata_key) != base_configuration.get(metadata_key):
+        return False
+
+    return True
+
+
+def machine_matches(base_configuration, target_configuration):
+    return base_configuration.get('MACHINE') == target_configuration.get('MACHINE')
+
+
+def can_be_compared(logger, base, target):
+    """
+    Some tests are not relevant to compare, for example oeselftest
+    runs with different test sets or parameters. Return True if the
+    tests can be compared.
+    """
+    ret = True
+    base_configuration = base['configuration']
+    target_configuration = target['configuration']
+
+    # Older test results lack proper OESELFTEST_METADATA: if it is not present, try to guess it from the test results.
+    if base_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in base_configuration:
+        guess = guess_oeselftest_metadata(base['result'])
+        if guess is None:
+            logger.error(f"ERROR: did not manage to guess oeselftest metadata for {base_configuration['STARTTIME']}")
+        else:
+            logger.debug(f"Enriching {base_configuration['STARTTIME']} with {guess}")
+            base_configuration['OESELFTEST_METADATA'] = guess
+    if target_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in target_configuration:
+        guess = guess_oeselftest_metadata(target['result'])
+        if guess is None:
+            logger.error(f"ERROR: did not manage to guess oeselftest metadata for {target_configuration['STARTTIME']}")
+        else:
+            logger.debug(f"Enriching {target_configuration['STARTTIME']} with {guess}")
+            target_configuration['OESELFTEST_METADATA'] = guess
+
+    # Test runs with LTP results in them should only be compared with other runs that also contain LTP tests
+    if base_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in base['result']):
+        ret = target_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in target['result'])
+
+    return ret and metadata_matches(base_configuration, target_configuration) \
+        and machine_matches(base_configuration, target_configuration)
+
+def get_status_str(raw_status):
+    raw_status_lower = raw_status.lower() if raw_status else "None"
+    return STATUS_STRINGS.get(raw_status_lower, raw_status)
+
+def get_additional_info_line(new_pass_count, new_tests):
+    result = []
+    if new_tests:
+        result.append(f'+{new_tests} test(s) present')
+    if new_pass_count:
+        result.append(f'+{new_pass_count} test(s) now passing')
+
+    if not result:
+        return ""
+
+    return ' -> ' + ', '.join(result) + '\n'
+
+def compare_result(logger, base_name, target_name, base_result, target_result, display_limit=None):
     base_result = base_result.get('result')
     target_result = target_result.get('result')
     result = {}
+    new_tests = 0
+    regressions = {}
+    resultstring = ""
+    new_pass_count = 0
+
+    display_limit = int(display_limit) if display_limit else REGRESSIONS_DISPLAY_LIMIT
+
     if base_result and target_result:
         for k in base_result:
             base_testcase = base_result[k]
@@ -27,12 +221,47 @@ def compare_result(logger, base_name, target_name, base_result, target_result):
                 result[k] = {'base': base_status, 'target': target_status}
             else:
                 logger.error('Failed to retrieved base test case status: %s' % k)
+
+        # Also count new tests that were not present in the base results: these
+        # could be newly added tests, but could also highlight test renames
+        # or fixed faulty ptests
+        for k in target_result:
+            if k not in base_result:
+                new_tests += 1
     if result:
-        resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
-        for k in sorted(result):
-            resultstring += '    %s: %s -> %s\n' % (k, result[k]['base'], result[k]['target'])
+        new_pass_count = sum(test['target'] is not None and test['target'].startswith("PASS") for test in result.values())
+        # Print a regression report only if at least one test has a regression status (FAIL, SKIPPED, absent...)
+        if new_pass_count < len(result):
+            resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
+            for k in sorted(result):
+                if not result[k]['target'] or not result[k]['target'].startswith("PASS"):
+                    # Differentiate each ptest kind when listing regressions
+                    key_parts = k.split('.')
+                    key = '.'.join(key_parts[:2]) if k.startswith('ptest') else key_parts[0]
+                    # Append the new regression to the corresponding test family
+                    regressions[key] = regressions.setdefault(key, []) + ['    %s: %s -> %s\n' % (k, get_status_str(result[k]['base']), get_status_str(result[k]['target']))]
+            resultstring += f"    Total: {sum([len(regressions[r]) for r in regressions])} new regression(s):\n"
+            for k in regressions:
+                resultstring += f"    {len(regressions[k])} regression(s) for {k}\n"
+                count_to_print = min([display_limit, len(regressions[k])]) if display_limit > 0 else len(regressions[k])
+                resultstring += ''.join(regressions[k][:count_to_print])
+                if count_to_print < len(regressions[k]):
+                    resultstring += '    [...]\n'
+            if new_pass_count > 0:
+                resultstring += f'    Additionally, {new_pass_count} previously failing test(s) is/are now passing\n'
+            if new_tests > 0:
+                resultstring += f'    Additionally, {new_tests} new test(s) is/are present\n'
+        else:
+            resultstring = "%s\n%s\n" % (base_name, target_name)
+            result = None
     else:
-        resultstring = "Match: %s\n %s" % (base_name, target_name)
+        resultstring = "%s\n%s\n" % (base_name, target_name)
+
+    if not result:
+        additional_info = get_additional_info_line(new_pass_count, new_tests)
+        if additional_info:
+            resultstring += additional_info
+
     return result, resultstring
 
 def get_results(logger, source):
@@ -44,12 +273,38 @@ def regression(args, logger):
 
     regression_common(args, logger, base_results, target_results)
 
+# Some test case naming is poor and contains random strings, particularly lttng/babeltrace.
+# Truncating the test names works since they contain file and line number identifiers,
+# which allow us to match them without the random components.
+def fixup_ptest_names(results, logger):
+    for r in results:
+        for i in results[r]:
+            tests = list(results[r][i]['result'].keys())
+            for test in tests:
+                new = None
+                if test.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in test:
+                    new = test.split("_-_")[0]
+                elif test.startswith("ptestresult.curl.") and "__" in test:
+                    new = test.split("__")[0]
+                elif test.startswith("ptestresult.dbus.") and "__" in test:
+                    new = test.split("__")[0]
+                elif test.startswith("ptestresult.binutils") and "build-st-" in test:
+                    new = test.split(" ")[0]
+                elif test.startswith("ptestresult.gcc") and "/tmp/runtest." in test:
+                    new = ".".join(test.split(".")[:2])
+                if new:
+                    results[r][i]['result'][new] = results[r][i]['result'][test]
+                    del results[r][i]['result'][test]
+
 def regression_common(args, logger, base_results, target_results):
     if args.base_result_id:
         base_results = resultutils.filter_resultsdata(base_results, args.base_result_id)
     if args.target_result_id:
         target_results = resultutils.filter_resultsdata(target_results, args.target_result_id)
 
+    fixup_ptest_names(base_results, logger)
+    fixup_ptest_names(target_results, logger)
+
     matches = []
     regressions = []
     notfound = []
@@ -62,7 +317,9 @@ def regression_common(args, logger, base_results, target_results):
             # removing any pairs which match
            for c in base.copy():
                 for b in target.copy():
+                    if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
+                        continue
-                    res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b])
+                    res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
                     if not res:
                         matches.append(resstr)
                         base.remove(c)
@@ -71,15 +328,18 @@ def regression_common(args, logger, base_results, target_results):
             # Should only now see regressions, we may not be able to match multiple pairs directly
            for c in base:
                 for b in target:
+                    if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
+                        continue
-                    res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b])
+                    res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
                     if res:
                         regressions.append(resstr)
         else:
             notfound.append("%s not found in target" % a)
-    print("\n".join(sorted(matches)))
     print("\n".join(sorted(regressions)))
+    print("\n" + MISSING_TESTS_BANNER + "\n")
     print("\n".join(sorted(notfound)))
-
+    print("\n" + ADDITIONAL_DATA_BANNER + "\n")
+    print("\n".join(sorted(matches)))
     return 0
 
 def regression_git(args, logger):
@@ -183,4 +443,5 @@ def register_commands(subparsers):
     parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified")
     parser_build.add_argument('--commit2', help="Revision to compare with")
     parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified")
+    parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
 
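
The effect of fixup_ptest_names() is easiest to see on an example; the test id
below is made up but follows the real lttng-tools "<name>_-_<random part>"
pattern the comment describes:

    test = "ptestresult.lttng-tools.tools/notification/test_notification_multi_app_-_iteration_3"
    if test.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in test:
        test = test.split("_-_")[0]
    print(test)
    # ptestresult.lttng-tools.tools/notification/test_notification_multi_app
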
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index f0ca50ebe2..a349510ab8 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -176,7 +176,10 @@ class ResultsTextReport(object):
             vals['sort'] = line['testseries'] + "_" + line['result_id']
             vals['failed_testcases'] = line['failed_testcases']
             for k in cols:
-                vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
+                if total_tested:
+                    vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
+                else:
+                    vals[k] = "0 (0%)"
             for k in maxlen:
                 if k in vals and len(vals[k]) > maxlen[k]:
                     maxlen[k] = len(vals[k])
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
index 8917022d36..c5521d81bd 100644
--- a/scripts/lib/resulttool/resultutils.py
+++ b/scripts/lib/resulttool/resultutils.py
@@ -58,7 +58,11 @@ def append_resultsdata(results, f, configmap=store_map, configvars=extra_configv
             testseries = posixpath.basename(posixpath.dirname(url.path))
         else:
             with open(f, "r") as filedata:
-                data = json.load(filedata)
+                try:
+                    data = json.load(filedata)
+                except json.decoder.JSONDecodeError:
+                    print("Cannot decode {}. Possible corruption. Skipping.".format(f))
+                    data = ""
             testseries = os.path.basename(os.path.dirname(f))
     else:
         data = f
@@ -142,7 +146,7 @@ def generic_get_log(sectionname, results, section):
         return decode_log(ptest['log'])
 
 def ptestresult_get_log(results, section):
-    return generic_get_log('ptestresuls.sections', results, section)
+    return generic_get_log('ptestresult.sections', results, section)
 
 def generic_get_rawlogs(sectname, results):
     if sectname not in results:
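
The intent of the JSONDecodeError guard, reduced to a standalone helper
(the path handling is simplified relative to append_resultsdata()):

    import json

    def load_results_file(path):
        with open(path, "r") as filedata:
            try:
                return json.load(filedata)
            except json.decoder.JSONDecodeError:
                print("Cannot decode {}. Possible corruption. Skipping.".format(path))
                return None
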
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index f92255d8dc..f23e53cba9 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -5,7 +5,6 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import argparse
 import glob
 import logging
 import os
@@ -18,13 +17,14 @@ import sys
 import tempfile
 import threading
 import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
 
 class KeepAliveStreamHandler(logging.StreamHandler):
     def __init__(self, keepalive=True, **kwargs):
         super().__init__(**kwargs)
         if keepalive is True:
             keepalive = 5000  # default timeout
         self._timeout = threading.Condition()
         self._stop = False
 
@@ -35,9 +35,9 @@ class KeepAliveStreamHandler(logging.StreamHandler):
                 with self._timeout:
                     if not self._timeout.wait(keepalive):
                         self.emit(logging.LogRecord("keepalive", logging.INFO,
                                                     None, None, "Keepalive message", None, None))
 
-        self._thread = threading.Thread(target = thread, daemon = True)
+        self._thread = threading.Thread(target=thread, daemon=True)
         self._thread.start()
 
     def close(self):
@@ -71,18 +71,19 @@ def logger_setup_color(logger, color='auto'):
 
     for handler in logger.handlers:
         if (isinstance(handler, logging.StreamHandler) and
                 isinstance(handler.formatter, BBLogFormatter)):
             if color == 'always' or (color == 'auto' and handler.stream.isatty()):
                 handler.formatter.enable_color()
 
 
 def load_plugins(logger, plugins, pluginpath):
-
     def load_plugin(name):
         logger.debug('Loading plugin %s' % name)
-        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
+        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
         if spec:
-            return spec.loader.load_module()
+            mod = importlib.util.module_from_spec(spec)
+            spec.loader.exec_module(mod)
+            return mod
 
     def plugin_name(filename):
         return os.path.splitext(os.path.basename(filename))[0]
@@ -176,6 +177,7 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
             f.write('BB_STRICT_CHECKSUM = "ignore"\n')
             f.write('SRC_URI = "%s"\n' % srcuri)
             f.write('SRCREV = "%s"\n' % srcrev)
+            f.write('PV = "0.0+"\n')
             f.write('WORKDIR = "%s"\n' % tmpworkdir)
             # Set S out of the way so it doesn't get created under the workdir
             f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
@@ -215,7 +217,8 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
             pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
             for pathvar in pathvars:
                 path = rd.getVar(pathvar)
-                shutil.rmtree(path)
+                if os.path.exists(path):
+                    shutil.rmtree(path)
     finally:
         if fetchrecipe:
             try:
@@ -274,6 +277,6 @@ def filter_src_subdirs(pth):
     Used by devtool and recipetool.
     """
     dirlist = os.listdir(pth)
-    filterout = ['git.indirectionsymlink', 'source-date-epoch']
+    filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa']
    dirlist = [x for x in dirlist if x not in filterout]
     return dirlist
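
spec.loader.load_module() has been deprecated since Python 3.4; the
replacement used above is the documented importlib idiom, shown here as a
self-contained sketch:

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        # locate a module spec on the given path, then execute it into a
        # freshly created module object
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod
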
diff --git a/scripts/lib/wic/canned-wks/common.wks.inc b/scripts/lib/wic/canned-wks/common.wks.inc
index 4fd29fa8c1..89880b417b 100644
--- a/scripts/lib/wic/canned-wks/common.wks.inc
+++ b/scripts/lib/wic/canned-wks/common.wks.inc
@@ -1,3 +1,3 @@
 # This file is included into 3 canned wks files from this directory
 part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --use-uuid --fstype=ext4 --mkfs-extraopts "-T default" --label platform --align 1024
+part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024
diff --git a/scripts/lib/wic/canned-wks/directdisk-gpt.wks b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
index cf16c0c30b..8d7d8de6ea 100644
--- a/scripts/lib/wic/canned-wks/directdisk-gpt.wks
+++ b/scripts/lib/wic/canned-wks/directdisk-gpt.wks
@@ -4,7 +4,7 @@
 
 
 part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --ondisk sda --fstype=ext4 --mkfs-extraopts "-T default" --label platform --align 1024 --use-uuid
+part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
 
 bootloader --ptable gpt --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 console=ttyS0,115200n8"
 
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
index 7300e65e32..2fd286ff98 100644
--- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
@@ -1,3 +1,3 @@
 bootloader --ptable gpt
-part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.0
+part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1
 part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/mkefidisk.wks b/scripts/lib/wic/canned-wks/mkefidisk.wks
index d1878e23e5..9f534fe184 100644
--- a/scripts/lib/wic/canned-wks/mkefidisk.wks
+++ b/scripts/lib/wic/canned-wks/mkefidisk.wks
@@ -4,7 +4,7 @@
 
 part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
 
-part / --source rootfs --ondisk sda --fstype=ext4 --mkfs-extraopts "-T default" --label platform --align 1024 --use-uuid
+part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
 
 part swap --ondisk sda --size 44 --label swap1 --fstype=swap
 
diff --git a/scripts/lib/wic/canned-wks/qemuloongarch.wks b/scripts/lib/wic/canned-wks/qemuloongarch.wks
new file mode 100644
index 0000000000..8465c7a8c0
--- /dev/null
+++ b/scripts/lib/wic/canned-wks/qemuloongarch.wks
@@ -0,0 +1,3 @@
+# short-description: Create qcow2 image for LoongArch QEMU machines
+
+part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index 22b45217f1..808997611a 100644
--- a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -4,5 +4,5 @@
 
 include common.wks.inc
 
-bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 "
+bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 console=tty console=ttyS0 "
 
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py
index 018815b966..674ccfc244 100644
--- a/scripts/lib/wic/engine.py
+++ b/scripts/lib/wic/engine.py
@@ -19,10 +19,10 @@ import os
 import tempfile
 import json
 import subprocess
+import shutil
 import re
 
 from collections import namedtuple, OrderedDict
-from distutils.spawn import find_executable
 
 from wic import WicError
 from wic.filemap import sparse_copy
@@ -245,7 +245,7 @@ class Disk:
             for path in pathlist.split(':'):
                 self.paths = "%s%s:%s" % (native_sysroot, path, self.paths)
 
-        self.parted = find_executable("parted", self.paths)
+        self.parted = shutil.which("parted", path=self.paths)
         if not self.parted:
             raise WicError("Can't find executable parted")
 
@@ -283,7 +283,7 @@ class Disk:
                     "resize2fs", "mkswap", "mkdosfs", "debugfs","blkid"):
             aname = "_%s" % name
             if aname not in self.__dict__:
-                setattr(self, aname, find_executable(name, self.paths))
+                setattr(self, aname, shutil.which(name, path=self.paths))
                 if aname not in self.__dict__ or self.__dict__[aname] is None:
                     raise WicError("Can't find executable '{}'".format(name))
             return self.__dict__[aname]
diff --git a/scripts/lib/wic/filemap.py b/scripts/lib/wic/filemap.py
index 4d9da28172..85b39d5d74 100644
--- a/scripts/lib/wic/filemap.py
+++ b/scripts/lib/wic/filemap.py
@@ -46,6 +46,13 @@ def get_block_size(file_obj):
         bsize = stat.st_blksize
     else:
         raise IOError("Unable to determine block size")
+
+    # The logic in this script only supports a maximum of a 4KB
+    # block size
+    max_block_size = 4 * 1024
+    if bsize > max_block_size:
+        bsize = max_block_size
+
     return bsize
 
 class ErrorNotSupp(Exception):
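
The clamp in get_block_size() reduces to: whatever st_blksize reports, the
value handed back never exceeds 4 KiB, since the filemap logic only supports
block sizes up to that limit. A reduced sketch:

    import os

    def clamped_block_size(file_obj):
        stat = os.fstat(file_obj.fileno())
        bsize = stat.st_blksize if stat.st_blksize else 4096
        return min(bsize, 4 * 1024)  # never larger than 4 KiB
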
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
index bd3a2b97df..163535e431 100644
--- a/scripts/lib/wic/help.py
+++ b/scripts/lib/wic/help.py
@@ -637,7 +637,7 @@ DESCRIPTION
     oe-core: directdisk.bbclass and mkefidisk.sh. The difference
     between wic and those examples is that with wic the functionality
     of those scripts is implemented by a general-purpose partitioning
-    'language' based on Redhat kickstart syntax).
+    'language' based on Red Hat kickstart syntax).
 
     The initial motivation and design considerations that lead to the
     current tool are described exhaustively in Yocto Bug #3847
@@ -840,8 +840,8 @@ DESCRIPTION
     meanings. The commands are based on the Fedora kickstart
     documentation but with modifications to reflect wic capabilities.
 
-      http://fedoraproject.org/wiki/Anaconda/Kickstart#part_or_partition
-      http://fedoraproject.org/wiki/Anaconda/Kickstart#bootloader
+      https://pykickstart.readthedocs.io/en/latest/kickstart-docs.html#part-or-partition
+      https://pykickstart.readthedocs.io/en/latest/kickstart-docs.html#bootloader
 
     Commands
 
@@ -930,6 +930,7 @@ DESCRIPTION
                  ext4
                  btrfs
                  squashfs
+                 erofs
                  swap
 
         --fsoptions: Specifies a free-form string of options to be
@@ -939,6 +940,12 @@ DESCRIPTION
                      quotes. If not specified, the default string is
                      "defaults".
 
+        --fspassno: Specifies the order in which filesystem checks are done
+                    at boot time by fsck. See the fs_passno parameter of
+                    fstab(5). This parameter will be copied into the
+                    /etc/fstab file of the installed system. If not
+                    specified, the default value of "0" will be used.
+
         --label label: Specifies the label to give to the filesystem
                        to be made on the partition. If the given
                        label is already in use by another filesystem,
@@ -990,6 +997,9 @@ DESCRIPTION
                         multiple partitions and we want to keep the right
                         permissions and usernames in all the partitions.
 
+        --no-fstab-update: This option is specific to wic. It prevents the
+                           stock '/etc/fstab' file from being updated for
+                           the given partition.
+
         --extra-space: This option is specific to wic. It adds extra
                        space after the space filled by the content
                        of the partition. The final size can go
@@ -1108,7 +1118,7 @@ COMMAND:
 TOPIC:
     overview - Presents an overall overview of Wic
     plugins - Presents an overview and API for Wic plugins
-    kickstart - Presents a Wic kicstart file reference
+    kickstart - Presents a Wic kickstart file reference
 
 
 Examples:
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py
index 3eb669da39..7ef3dc83dd 100644
--- a/scripts/lib/wic/ksparser.py
+++ b/scripts/lib/wic/ksparser.py
@@ -155,9 +155,11 @@ class KickStart():
         part.add_argument('--change-directory')
         part.add_argument("--extra-space", type=sizetype("M"))
         part.add_argument('--fsoptions', dest='fsopts')
+        part.add_argument('--fspassno', dest='fspassno')
         part.add_argument('--fstype', default='vfat',
                           choices=('ext2', 'ext3', 'ext4', 'btrfs',
-                                   'squashfs', 'vfat', 'msdos', 'swap'))
+                                   'squashfs', 'vfat', 'msdos', 'erofs',
+                                   'swap', 'none'))
         part.add_argument('--mkfs-extraopts', default='')
         part.add_argument('--label')
         part.add_argument('--use-label', action='store_true')
@@ -169,6 +171,7 @@ class KickStart():
         part.add_argument('--rootfs-dir')
         part.add_argument('--type', default='primary',
                           choices = ('primary', 'logical'))
+        part.add_argument('--hidden', action='store_true')
 
         # --size and --fixed-size cannot be specified together; options
         # --extra-space and --overhead-factor should also raise a parser
@@ -184,11 +187,13 @@ class KickStart():
         part.add_argument('--use-uuid', action='store_true')
         part.add_argument('--uuid')
         part.add_argument('--fsuuid')
+        part.add_argument('--no-fstab-update', action='store_true')
+        part.add_argument('--mbr', action='store_true')
 
         bootloader = subparsers.add_parser('bootloader')
         bootloader.add_argument('--append')
         bootloader.add_argument('--configfile')
-        bootloader.add_argument('--ptable', choices=('msdos', 'gpt'),
+        bootloader.add_argument('--ptable', choices=('msdos', 'gpt', 'gpt-hybrid'),
                                 default='msdos')
         bootloader.add_argument('--timeout', type=int)
         bootloader.add_argument('--source')
@@ -229,6 +234,10 @@ class KickStart():
                     err = "%s:%d: SquashFS does not support LABEL" \
                           % (confpath, lineno)
                     raise KickStartError(err)
+                # erofs does not support filesystem labels
+                if parsed.fstype == 'erofs' and parsed.label:
+                    err = "%s:%d: erofs does not support LABEL" % (confpath, lineno)
+                    raise KickStartError(err)
                 if parsed.fstype == 'msdos' or parsed.fstype == 'vfat':
                     if parsed.fsuuid:
                         if parsed.fsuuid.upper().startswith('0X'):
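
How the new part options surface in .wks files can be sketched with a reduced
argparse model (this is not the full wic parser); a line such as
"part /boot --fstype=vfat --fspassno 2 --hidden --mbr" parses into the
attributes the Partition class now reads:

    import argparse
    import shlex

    part = argparse.ArgumentParser()
    part.add_argument('mountpoint')
    part.add_argument('--fstype', default='vfat')
    part.add_argument('--fspassno', dest='fspassno')
    part.add_argument('--hidden', action='store_true')
    part.add_argument('--mbr', action='store_true')

    args = part.parse_args(shlex.split('/boot --fstype=vfat --fspassno 2 --hidden --mbr'))
    print(args.fspassno, args.hidden, args.mbr)  # -> 2 True True
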
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py
index 57c042c503..1a7c140fa6 100644
--- a/scripts/lib/wic/misc.py
+++ b/scripts/lib/wic/misc.py
@@ -16,16 +16,16 @@ import logging
 import os
 import re
 import subprocess
+import shutil
 
 from collections import defaultdict
-from distutils import spawn
 
 from wic import WicError
 
 logger = logging.getLogger('wic')
 
 # executable -> recipe pairs for exec_native_cmd
-NATIVE_RECIPES = {"bmaptool": "bmap-tools",
+NATIVE_RECIPES = {"bmaptool": "bmaptool",
                   "dumpe2fs": "e2fsprogs",
                   "grub-mkimage": "grub-efi",
                   "isohybrid": "syslinux",
@@ -36,6 +36,7 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools",
                   "mkdosfs": "dosfstools",
                   "mkisofs": "cdrtools",
                   "mkfs.btrfs": "btrfs-tools",
+                  "mkfs.erofs": "erofs-utils",
                   "mkfs.ext2": "e2fsprogs",
                   "mkfs.ext3": "e2fsprogs",
                   "mkfs.ext4": "e2fsprogs",
@@ -122,7 +123,7 @@ def find_executable(cmd, paths):
     if provided and "%s-native" % recipe in provided:
         return True
 
-    return spawn.find_executable(cmd, paths)
+    return shutil.which(cmd, path=paths)
 
 def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
     """
@@ -140,11 +141,12 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
         cmd_and_args = pseudo + cmd_and_args
 
     hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR")
+    target_sys = get_bitbake_var("TARGET_SYS")
 
-    native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/bin:%s" % \
-                   (native_sysroot, native_sysroot,
-                    native_sysroot, native_sysroot,
-                    hosttools_dir)
+    native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/usr/bin/%s:%s/bin:%s" % \
+                   (native_sysroot, native_sysroot,
+                    native_sysroot, native_sysroot, target_sys,
+                    native_sysroot, hosttools_dir)
 
     native_cmd_and_args = "export PATH=%s:$PATH;%s" % \
                           (native_paths, cmd_and_args)
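
distutils is deprecated (PEP 632) and shutil.which() is the stdlib
replacement; note that find_executable(cmd, paths) took the search path
positionally, while shutil.which() takes it as the path= keyword. A quick
check with a made-up sysroot path:

    import shutil

    paths = "/work/native-sysroot/sbin:/work/native-sysroot/usr/bin"  # example
    parted = shutil.which("parted", path=paths)
    if parted is None:
        print("parted not found in %s" % paths)
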
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
index 76d144d12d..795707ec5d 100644
--- a/scripts/lib/wic/partition.py
+++ b/scripts/lib/wic/partition.py
@@ -33,6 +33,7 @@ class Partition():
         self.include_path = args.include_path
         self.change_directory = args.change_directory
         self.fsopts = args.fsopts
+        self.fspassno = args.fspassno
         self.fstype = args.fstype
         self.label = args.label
         self.use_label = args.use_label
@@ -54,9 +55,12 @@ class Partition():
         self.uuid = args.uuid
         self.fsuuid = args.fsuuid
         self.type = args.type
+        self.no_fstab_update = args.no_fstab_update
         self.updated_fstab_path = None
         self.has_fstab = False
         self.update_fstab_in_rootfs = False
+        self.hidden = args.hidden
+        self.mbr = args.mbr
 
         self.lineno = lineno
         self.source_file = ""
@@ -104,7 +108,7 @@ class Partition():
             extra_blocks = self.extra_space
 
         rootfs_size = actual_rootfs_size + extra_blocks
-        rootfs_size *= self.overhead_factor
+        rootfs_size = int(rootfs_size * self.overhead_factor)
 
         logger.debug("Added %d extra blocks to %s to get to %d total blocks",
                      extra_blocks, self.mountpoint, rootfs_size)
@@ -131,6 +135,8 @@ class Partition():
             self.update_fstab_in_rootfs = True
 
         if not self.source:
+            if self.fstype == "none" or self.no_table:
+                return
             if not self.size and not self.fixed_size:
                 raise WicError("The %s partition has a size of zero. Please "
                                "specify a non-zero --size/--fixed-size for that "
@@ -141,9 +147,9 @@ class Partition():
                                native_sysroot)
             self.source_file = "%s/fs.%s" % (cr_workdir, self.fstype)
         else:
-            if self.fstype == 'squashfs':
-                raise WicError("It's not possible to create empty squashfs "
-                               "partition '%s'" % (self.mountpoint))
+            if self.fstype in ('squashfs', 'erofs'):
+                raise WicError("It's not possible to create an empty %s "
+                               "partition '%s'" % (self.fstype, self.mountpoint))
 
             rootfs = "%s/fs_%s.%s.%s" % (cr_workdir, self.label,
                                          self.lineno, self.fstype)
@@ -170,7 +176,7 @@ class Partition():
         # Split sourceparams string of the form key1=val1[,key2=val2,...]
         # into a dict. Also accepts valueless keys i.e. without =
         splitted = self.sourceparams.split(',')
-        srcparams_dict = dict(par.split('=', 1) for par in splitted if par)
+        srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
 
         plugin = PluginMgr.get_plugins('source')[self.source]
         plugin.do_configure_partition(self, srcparams_dict, creator,
@@ -278,6 +284,20 @@ class Partition():
 
         extraopts = self.mkfs_extraopts or "-F -i 8192"
 
+        if os.getenv('SOURCE_DATE_EPOCH'):
+            sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
+            if pseudo:
+                pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
+            else:
+                pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
+
+            # Set hash_seed to generate deterministic directory indexes
+            namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
+            if self.fsuuid:
+                namespace = uuid.UUID(self.fsuuid)
+            hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
+            extraopts += " -E hash_seed=%s" % hash_seed
+
         label_str = ""
         if self.label:
             label_str = "-L %s" % self.label
@@ -286,7 +306,7 @@ class Partition():
                    (self.fstype, extraopts, rootfs, label_str, self.fsuuid, rootfs_dir)
         exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
 
-        if self.updated_fstab_path and self.has_fstab:
+        if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
             debugfs_script_path = os.path.join(cr_workdir, "debugfs_script")
             with open(debugfs_script_path, "w") as f:
                 f.write("cd etc\n")
@@ -298,6 +318,30 @@ class Partition():
             mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs)
             exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
 
+        if os.getenv('SOURCE_DATE_EPOCH'):
+            sde_time = hex(int(os.getenv('SOURCE_DATE_EPOCH')))
+            debugfs_script_path = os.path.join(cr_workdir, "debugfs_script")
+            files = []
+            for root, dirs, others in os.walk(rootfs_dir):
+                base = root.replace(rootfs_dir, "").rstrip(os.sep)
+                files += [ "/" if base == "" else base ]
+                files += [ base + "/" + n for n in dirs + others ]
+            with open(debugfs_script_path, "w") as f:
+                f.write("set_current_time %s\n" % (sde_time))
+                if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
+                    f.write("set_inode_field /etc/fstab mtime %s\n" % (sde_time))
+                    f.write("set_inode_field /etc/fstab mtime_extra 0\n")
+                for file in set(files):
+                    for time in ["atime", "ctime", "crtime"]:
+                        f.write("set_inode_field \"%s\" %s %s\n" % (file, time, sde_time))
+                        f.write("set_inode_field \"%s\" %s_extra 0\n" % (file, time))
+                for time in ["wtime", "mkfs_time", "lastcheck"]:
+                    f.write("set_super_value %s %s\n" % (time, sde_time))
+                for time in ["mtime", "first_error_time", "last_error_time"]:
+                    f.write("set_super_value %s 0\n" % (time))
+            debugfs_cmd = "debugfs -w -f %s %s" % (debugfs_script_path, rootfs)
+            exec_native_cmd(debugfs_cmd, native_sysroot)
+
         self.check_for_Y2038_problem(rootfs, native_sysroot)
 
     def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
@@ -350,8 +394,8 @@ class Partition():
         mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (rootfs, rootfs_dir)
         exec_native_cmd(mcopy_cmd, native_sysroot)
 
-        if self.updated_fstab_path and self.has_fstab:
-            mcopy_cmd = "mcopy -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path)
+        if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
+            mcopy_cmd = "mcopy -m -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path)
             exec_native_cmd(mcopy_cmd, native_sysroot)
 
         chmod_cmd = "chmod 644 %s" % rootfs
@@ -369,6 +413,19 @@ class Partition():
                        (rootfs_dir, rootfs, extraopts)
         exec_native_cmd(squashfs_cmd, native_sysroot, pseudo=pseudo)
 
+    def prepare_rootfs_erofs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
+                             native_sysroot, pseudo):
+        """
+        Prepare content for an erofs rootfs partition.
+        """
+        extraopts = self.mkfs_extraopts or ''
+        erofs_cmd = "mkfs.erofs %s -U %s %s %s" % \
+                    (extraopts, self.fsuuid, rootfs, rootfs_dir)
+        exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo)
+
+    def prepare_empty_partition_none(self, rootfs, oe_builddir, native_sysroot):
+        pass
+
     def prepare_empty_partition_ext(self, rootfs, oe_builddir,
                                     native_sysroot):
         """
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
index d9b4e57747..b64568339b 100644
--- a/scripts/lib/wic/pluginbase.py
+++ b/scripts/lib/wic/pluginbase.py
@@ -9,9 +9,11 @@ __all__ = ['ImagerPlugin', 'SourcePlugin']
 
 import os
 import logging
+import types
 
 from collections import defaultdict
-from importlib.machinery import SourceFileLoader
+import importlib
+import importlib.util
 
 from wic import WicError
 from wic.misc import get_bitbake_var
@@ -54,7 +56,9 @@ class PluginMgr:
                     mname = fname[:-3]
                     mpath = os.path.join(ppath, fname)
                     logger.debug("loading plugin module %s", mpath)
-                    SourceFileLoader(mname, mpath).load_module()
+                    spec = importlib.util.spec_from_file_location(mname, mpath)
+                    module = importlib.util.module_from_spec(spec)
+                    spec.loader.exec_module(module)
 
         return PLUGINS.get(ptype)
 
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
index ea709e8c54..a1d152659b 100644
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ b/scripts/lib/wic/plugins/imager/direct.py
@@ -77,7 +77,8 @@ class DirectPlugin(ImagerPlugin):
77 77
78 image_path = self._full_path(self.workdir, self.parts[0].disk, "direct") 78 image_path = self._full_path(self.workdir, self.parts[0].disk, "direct")
79 self._image = PartitionedImage(image_path, self.ptable_format, 79 self._image = PartitionedImage(image_path, self.ptable_format,
80 self.parts, self.native_sysroot) 80 self.parts, self.native_sysroot,
81 options.extra_space)
81 82
82 def setup_workdir(self, workdir): 83 def setup_workdir(self, workdir):
83 if workdir: 84 if workdir:
@@ -116,7 +117,7 @@ class DirectPlugin(ImagerPlugin):
116 updated = False 117 updated = False
117 for part in self.parts: 118 for part in self.parts:
118 if not part.realnum or not part.mountpoint \ 119 if not part.realnum or not part.mountpoint \
119 or part.mountpoint == "/": 120 or part.mountpoint == "/" or not (part.mountpoint.startswith('/') or part.mountpoint == "swap"):
120 continue 121 continue
121 122
122 if part.use_uuid: 123 if part.use_uuid:
@@ -137,8 +138,9 @@ class DirectPlugin(ImagerPlugin):
137 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum) 138 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum)
138 139
139 opts = part.fsopts if part.fsopts else "defaults" 140 opts = part.fsopts if part.fsopts else "defaults"
141 passno = part.fspassno if part.fspassno else "0"
140 line = "\t".join([device_name, part.mountpoint, part.fstype, 142 line = "\t".join([device_name, part.mountpoint, part.fstype,
141 opts, "0", "0"]) + "\n" 143 opts, "0", passno]) + "\n"
142 144
143 fstab_lines.append(line) 145 fstab_lines.append(line)
144 updated = True 146 updated = True
@@ -147,6 +149,9 @@ class DirectPlugin(ImagerPlugin):
147 self.updated_fstab_path = os.path.join(self.workdir, "fstab") 149 self.updated_fstab_path = os.path.join(self.workdir, "fstab")
148 with open(self.updated_fstab_path, "w") as f: 150 with open(self.updated_fstab_path, "w") as f:
149 f.writelines(fstab_lines) 151 f.writelines(fstab_lines)
152 if os.getenv('SOURCE_DATE_EPOCH'):
153 fstab_time = int(os.getenv('SOURCE_DATE_EPOCH'))
154 os.utime(self.updated_fstab_path, (fstab_time, fstab_time))
150 155
151 def _full_path(self, path, name, extention): 156 def _full_path(self, path, name, extention):
152 """ Construct full file path to a file we generate. """ 157 """ Construct full file path to a file we generate. """
@@ -258,6 +263,8 @@ class DirectPlugin(ImagerPlugin):
258 if part.mountpoint == "/": 263 if part.mountpoint == "/":
259 if part.uuid: 264 if part.uuid:
260 return "PARTUUID=%s" % part.uuid 265 return "PARTUUID=%s" % part.uuid
266 elif part.label and self.ptable_format != 'msdos':
267 return "PARTLABEL=%s" % part.label
261 else: 268 else:
262 suffix = 'p' if part.disk.startswith('mmcblk') else '' 269 suffix = 'p' if part.disk.startswith('mmcblk') else ''
263 return "/dev/%s%s%-d" % (part.disk, suffix, part.realnum) 270 return "/dev/%s%s%-d" % (part.disk, suffix, part.realnum)
@@ -293,7 +300,7 @@ class PartitionedImage():
293 Partitioned image in a file. 300 Partitioned image in a file.
294 """ 301 """
295 302
296 def __init__(self, path, ptable_format, partitions, native_sysroot=None): 303 def __init__(self, path, ptable_format, partitions, native_sysroot=None, extra_space=0):
297 self.path = path # Path to the image file 304 self.path = path # Path to the image file
298 self.numpart = 0 # Number of allocated partitions 305 self.numpart = 0 # Number of allocated partitions
299 self.realpart = 0 # Number of partitions in the partition table 306 self.realpart = 0 # Number of partitions in the partition table
@@ -306,7 +313,10 @@ class PartitionedImage():
306 # all partitions (in bytes) 313 # all partitions (in bytes)
307 self.ptable_format = ptable_format # Partition table format 314 self.ptable_format = ptable_format # Partition table format
308 # Disk system identifier 315 # Disk system identifier
309 self.identifier = random.SystemRandom().randint(1, 0xffffffff) 316 if os.getenv('SOURCE_DATE_EPOCH'):
317 self.identifier = random.Random(int(os.getenv('SOURCE_DATE_EPOCH'))).randint(1, 0xffffffff)
318 else:
319 self.identifier = random.SystemRandom().randint(1, 0xffffffff)
310 320
311 self.partitions = partitions 321 self.partitions = partitions
312 self.partimages = [] 322 self.partimages = []
@@ -314,6 +324,7 @@ class PartitionedImage():
314 self.sector_size = SECTOR_SIZE 324 self.sector_size = SECTOR_SIZE
315 self.native_sysroot = native_sysroot 325 self.native_sysroot = native_sysroot
316 num_real_partitions = len([p for p in self.partitions if not p.no_table]) 326 num_real_partitions = len([p for p in self.partitions if not p.no_table])
327 self.extra_space = extra_space
317 328
318 # calculate the real partition number, accounting for partitions not 329 # calculate the real partition number, accounting for partitions not
319 # in the partition table and logical partitions 330 # in the partition table and logical partitions
@@ -331,7 +342,7 @@ class PartitionedImage():
331 # generate partition and filesystem UUIDs 342
332 for part in self.partitions: 343 for part in self.partitions:
333 if not part.uuid and part.use_uuid: 344 if not part.uuid and part.use_uuid:
334 if self.ptable_format == 'gpt': 345 if self.ptable_format in ('gpt', 'gpt-hybrid'):
335 part.uuid = str(uuid.uuid4()) 346 part.uuid = str(uuid.uuid4())
336 else: # msdos partition table 347 else: # msdos partition table
337 part.uuid = '%08x-%02d' % (self.identifier, part.realnum) 348 part.uuid = '%08x-%02d' % (self.identifier, part.realnum)
@@ -387,6 +398,10 @@ class PartitionedImage():
387 raise WicError("setting custom partition type is not " \ 398 raise WicError("setting custom partition type is not " \
388 "implemented for msdos partitions") 399 "implemented for msdos partitions")
389 400
401 if part.mbr and self.ptable_format != 'gpt-hybrid':
402 raise WicError("Partition may only be included in MBR with " \
403 "a gpt-hybrid partition table")
404
390 # Get the disk where the partition is located 405 # Get the disk where the partition is located
391 self.numpart += 1 406 self.numpart += 1
392 if not part.no_table: 407 if not part.no_table:
@@ -395,7 +410,7 @@ class PartitionedImage():
395 if self.numpart == 1: 410 if self.numpart == 1:
396 if self.ptable_format == "msdos": 411 if self.ptable_format == "msdos":
397 overhead = MBR_OVERHEAD 412 overhead = MBR_OVERHEAD
398 elif self.ptable_format == "gpt": 413 elif self.ptable_format in ("gpt", "gpt-hybrid"):
399 overhead = GPT_OVERHEAD 414 overhead = GPT_OVERHEAD
400 415
401 # Skip one sector required for the partitioning scheme overhead 416 # Skip one sector required for the partitioning scheme overhead
@@ -479,10 +494,11 @@ class PartitionedImage():
479 # Once all the partitions have been laid out, we can calculate the 494 # Once all the partitions have been laid out, we can calculate the
480 # minimum disk size 495 # minimum disk size
481 self.min_size = self.offset 496 self.min_size = self.offset
482 if self.ptable_format == "gpt": 497 if self.ptable_format in ("gpt", "gpt-hybrid"):
483 self.min_size += GPT_OVERHEAD 498 self.min_size += GPT_OVERHEAD
484 499
485 self.min_size *= self.sector_size 500 self.min_size *= self.sector_size
501 self.min_size += self.extra_space
486 502
487 def _create_partition(self, device, parttype, fstype, start, size): 503 def _create_partition(self, device, parttype, fstype, start, size):
488 """ Create a partition on an image described by the 'device' object. """ 504 """ Create a partition on an image described by the 'device' object. """
@@ -499,22 +515,49 @@ class PartitionedImage():
499 515
500 return exec_native_cmd(cmd, self.native_sysroot) 516 return exec_native_cmd(cmd, self.native_sysroot)
501 517
518 def _write_identifier(self, device, identifier):
519 logger.debug("Set disk identifier %x", identifier)
520 with open(device, 'r+b') as img:
521 img.seek(0x1B8)
522 img.write(identifier.to_bytes(4, 'little'))
523
524 def _make_disk(self, device, ptable_format, min_size):
525 logger.debug("Creating sparse file %s", device)
526 with open(device, 'w') as sparse:
527 os.ftruncate(sparse.fileno(), min_size)
528
529 logger.debug("Initializing partition table for %s", device)
530 exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format),
531 self.native_sysroot)
532
533 def _write_disk_guid(self):
534 if self.ptable_format in ('gpt', 'gpt-hybrid'):
535 if os.getenv('SOURCE_DATE_EPOCH'):
536 self.disk_guid = uuid.UUID(int=int(os.getenv('SOURCE_DATE_EPOCH')))
537 else:
538 self.disk_guid = uuid.uuid4()
539
540 logger.debug("Set disk guid %s", self.disk_guid)
541 sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid)
542 exec_native_cmd(sfdisk_cmd, self.native_sysroot)
543
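
_write_identifier patches the four identifier bytes directly into the image at MBR offset 0x1B8 (little-endian), and _write_disk_guid derives a fixed GPT disk GUID from SOURCE_DATE_EPOCH when it is set. A condensed sketch of both, usable outside the class (function names are illustrative):

    import os
    import uuid

    def deterministic_disk_guid():
        # Fixed GUID under SOURCE_DATE_EPOCH, random otherwise.
        sde = os.getenv('SOURCE_DATE_EPOCH')
        return uuid.UUID(int=int(sde)) if sde else uuid.uuid4()

    def write_mbr_identifier(image_path, identifier):
        # The 32-bit disk identifier lives at byte 0x1B8 of sector 0,
        # stored little-endian, just before the partition table at 0x1BE.
        with open(image_path, 'r+b') as img:
            img.seek(0x1B8)
            img.write(identifier.to_bytes(4, 'little'))
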
502 def create(self): 544 def create(self):
503 logger.debug("Creating sparse file %s", self.path) 545 self._make_disk(self.path,
504 with open(self.path, 'w') as sparse: 546 "gpt" if self.ptable_format == "gpt-hybrid" else self.ptable_format,
505 os.ftruncate(sparse.fileno(), self.min_size) 547 self.min_size)
506 548
507 logger.debug("Initializing partition table for %s", self.path) 549 self._write_identifier(self.path, self.identifier)
508 exec_native_cmd("parted -s %s mklabel %s" % 550 self._write_disk_guid()
509 (self.path, self.ptable_format), self.native_sysroot)
510 551
511 logger.debug("Set disk identifier %x", self.identifier) 552 if self.ptable_format == "gpt-hybrid":
512 with open(self.path, 'r+b') as img: 553 mbr_path = self.path + ".mbr"
513 img.seek(0x1B8) 554 self._make_disk(mbr_path, "msdos", self.min_size)
514 img.write(self.identifier.to_bytes(4, 'little')) 555 self._write_identifier(mbr_path, self.identifier)
515 556
516 logger.debug("Creating partitions") 557 logger.debug("Creating partitions")
517 558
559 hybrid_mbr_part_num = 0
560
518 for part in self.partitions: 561 for part in self.partitions:
519 if part.num == 0: 562 if part.num == 0:
520 continue 563 continue
@@ -559,11 +602,19 @@ class PartitionedImage():
559 self._create_partition(self.path, part.type, 602 self._create_partition(self.path, part.type,
560 parted_fs_type, part.start, part.size_sec) 603 parted_fs_type, part.start, part.size_sec)
561 604
562 if part.part_name: 605 if self.ptable_format == "gpt-hybrid" and part.mbr:
606 hybrid_mbr_part_num += 1
607 if hybrid_mbr_part_num > 4:
608 raise WicError("Extended MBR partitions are not supported in hybrid MBR")
609 self._create_partition(mbr_path, "primary",
610 parted_fs_type, part.start, part.size_sec)
611
612 if self.ptable_format in ("gpt", "gpt-hybrid") and (part.part_name or part.label):
613 partition_label = part.part_name if part.part_name else part.label
563 logger.debug("partition %d: set name to %s", 614 logger.debug("partition %d: set name to %s",
564 part.num, part.part_name) 615 part.num, partition_label)
565 exec_native_cmd("sgdisk --change-name=%d:%s %s" % \ 616 exec_native_cmd("sgdisk --change-name=%d:%s %s" % \
566 (part.num, part.part_name, 617 (part.num, partition_label,
567 self.path), self.native_sysroot) 618 self.path), self.native_sysroot)
568 619
569 if part.part_type: 620 if part.part_type:
@@ -573,32 +624,55 @@ class PartitionedImage():
573 (part.num, part.part_type, 624 (part.num, part.part_type,
574 self.path), self.native_sysroot) 625 self.path), self.native_sysroot)
575 626
576 if part.uuid and self.ptable_format == "gpt": 627 if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"):
577 logger.debug("partition %d: set UUID to %s", 628 logger.debug("partition %d: set UUID to %s",
578 part.num, part.uuid) 629 part.num, part.uuid)
579 exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ 630 exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \
580 (part.num, part.uuid, self.path), 631 (part.num, part.uuid, self.path),
581 self.native_sysroot) 632 self.native_sysroot)
582 633
583 if part.label and self.ptable_format == "gpt":
584 logger.debug("partition %d: set name to %s",
585 part.num, part.label)
586 exec_native_cmd("parted -s %s name %d %s" % \
587 (self.path, part.num, part.label),
588 self.native_sysroot)
589
590 if part.active: 634 if part.active:
591 flag_name = "legacy_boot" if self.ptable_format == 'gpt' else "boot" 635 flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot"
592 logger.debug("Set '%s' flag for partition '%s' on disk '%s'", 636 logger.debug("Set '%s' flag for partition '%s' on disk '%s'",
593 flag_name, part.num, self.path) 637 flag_name, part.num, self.path)
594 exec_native_cmd("parted -s %s set %d %s on" % \ 638 exec_native_cmd("parted -s %s set %d %s on" % \
595 (self.path, part.num, flag_name), 639 (self.path, part.num, flag_name),
596 self.native_sysroot) 640 self.native_sysroot)
641 if self.ptable_format == 'gpt-hybrid' and part.mbr:
642 exec_native_cmd("parted -s %s set %d %s on" % \
643 (mbr_path, hybrid_mbr_part_num, "boot"),
644 self.native_sysroot)
597 if part.system_id: 645 if part.system_id:
598 exec_native_cmd("sfdisk --part-type %s %s %s" % \ 646 exec_native_cmd("sfdisk --part-type %s %s %s" % \
599 (self.path, part.num, part.system_id), 647 (self.path, part.num, part.system_id),
600 self.native_sysroot) 648 self.native_sysroot)
601 649
650 if part.hidden and self.ptable_format == "gpt":
651 logger.debug("Set hidden attribute for partition '%s' on disk '%s'",
652 part.num, self.path)
653 exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \
654 (self.path, part.num),
655 self.native_sysroot)
656
657 if self.ptable_format == "gpt-hybrid":
658 # Write a protective GPT partition
659 hybrid_mbr_part_num += 1
660 if hybrid_mbr_part_num > 4:
661 raise WicError("Extended MBR partitions are not supported in hybrid MBR")
662
663 # parted cannot directly create a protective GPT partition, so
664 # create with an arbitrary type, then change it to the correct type
665 # with sfdisk
666 self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD)
667 exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num),
668 self.native_sysroot)
669
670 # Copy hybrid MBR
671 with open(mbr_path, "rb") as mbr_file:
672 with open(self.path, "r+b") as image_file:
673 mbr = mbr_file.read(512)
674 image_file.write(mbr)
675
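
For gpt-hybrid layouts the code builds a separate msdos-labelled image alongside the GPT one and then grafts its first sector onto the final image, so sector 0 carries the hybrid MBR (partition entries at 0x1BE, boot signature at 0x1FE) while the GPT header and entries in the following sectors stay intact. A sketch of the graft step alone (graft_hybrid_mbr is an illustrative name):

    def graft_hybrid_mbr(mbr_path, image_path, sector_size=512):
        # Overwrite only sector 0 of the GPT image with the MBR built
        # by parted/sfdisk in the side file; nothing past byte 511 moves.
        with open(mbr_path, 'rb') as mbr_file:
            with open(image_path, 'r+b') as image_file:
                image_file.write(mbr_file.read(sector_size))
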
602 def cleanup(self): 676 def cleanup(self):
603 pass 677 pass
604 678
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py
index cdc72543c2..13a9cddf4e 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -12,6 +12,7 @@
12 12
13import logging 13import logging
14import os 14import os
15import tempfile
15import shutil 16import shutil
16import re 17import re
17 18
@@ -34,6 +35,26 @@ class BootimgEFIPlugin(SourcePlugin):
34 name = 'bootimg-efi' 35 name = 'bootimg-efi'
35 36
36 @classmethod 37 @classmethod
38 def _copy_additional_files(cls, hdddir, initrd, dtb):
39 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
40 if not bootimg_dir:
41 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
42
43 if initrd:
44 initrds = initrd.split(';')
45 for rd in initrds:
46 cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
47 exec_cmd(cp_cmd, True)
48 else:
49 logger.debug("Ignoring missing initrd")
50
51 if dtb:
52 if ';' in dtb:
53 raise WicError("Only one DTB supported, exiting")
54 cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir)
55 exec_cmd(cp_cmd, True)
56
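
_copy_additional_files centralizes what the grub-efi and systemd-boot paths previously duplicated: copy every image in the semicolon-separated initrd list, and at most one DTB, out of DEPLOY_DIR_IMAGE. A sketch of just the parameter handling (split_boot_artifacts is an illustrative name):

    def split_boot_artifacts(initrd, dtb):
        # initrd may name several images separated by ';'; dtb must name
        # at most one file, matching the checks in the helper above.
        initrds = initrd.split(';') if initrd else []
        if dtb and ';' in dtb:
            raise ValueError("Only one DTB supported")
        return initrds, dtb

    assert split_boot_artifacts("core.cpio.gz;extra.cpio.gz", None)[0] == \
           ["core.cpio.gz", "extra.cpio.gz"]
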
57 @classmethod
37 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params): 58 def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params):
38 """ 59 """
39 Create loader-specific (grub-efi) config 60 Create loader-specific (grub-efi) config
@@ -52,18 +73,9 @@ class BootimgEFIPlugin(SourcePlugin):
52 "get it from %s." % configfile) 73 "get it from %s." % configfile)
53 74
54 initrd = source_params.get('initrd') 75 initrd = source_params.get('initrd')
76 dtb = source_params.get('dtb')
55 77
56 if initrd: 78 cls._copy_additional_files(hdddir, initrd, dtb)
57 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
58 if not bootimg_dir:
59 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
60
61 initrds = initrd.split(';')
62 for rd in initrds:
63 cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
64 exec_cmd(cp_cmd, True)
65 else:
66 logger.debug("Ignoring missing initrd")
67 79
68 if not custom_cfg: 80 if not custom_cfg:
69 # Create grub configuration using parameters from wks file 81 # Create grub configuration using parameters from wks file
@@ -97,6 +109,9 @@ class BootimgEFIPlugin(SourcePlugin):
97 grubefi_conf += " /%s" % rd 109 grubefi_conf += " /%s" % rd
98 grubefi_conf += "\n" 110 grubefi_conf += "\n"
99 111
112 if dtb:
113 grubefi_conf += "devicetree /%s\n" % dtb
114
100 grubefi_conf += "}\n" 115 grubefi_conf += "}\n"
101 116
102 logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg", 117 logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg",
@@ -118,24 +133,18 @@ class BootimgEFIPlugin(SourcePlugin):
118 133
119 bootloader = creator.ks.bootloader 134 bootloader = creator.ks.bootloader
120 135
136 unified_image = source_params.get('create-unified-kernel-image') == "true"
137
121 loader_conf = "" 138 loader_conf = ""
122 loader_conf += "default boot\n" 139 if not unified_image:
140 loader_conf += "default boot\n"
123 loader_conf += "timeout %d\n" % bootloader.timeout 141 loader_conf += "timeout %d\n" % bootloader.timeout
124 142
125 initrd = source_params.get('initrd') 143 initrd = source_params.get('initrd')
144 dtb = source_params.get('dtb')
126 145
127 if initrd: 146 if not unified_image:
128 # obviously we need to have a common deploy var
129 bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
130 if not bootimg_dir:
131 raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
132
133 initrds = initrd.split(';')
134 for rd in initrds:
135 cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
136 exec_cmd(cp_cmd, True)
137 else:
138 logger.debug("Ignoring missing initrd")
139 148
140 logger.debug("Writing systemd-boot config " 149 logger.debug("Writing systemd-boot config "
141 "%s/hdd/boot/loader/loader.conf", cr_workdir) 150 "%s/hdd/boot/loader/loader.conf", cr_workdir)
@@ -183,11 +192,15 @@ class BootimgEFIPlugin(SourcePlugin):
183 for rd in initrds: 192 for rd in initrds:
184 boot_conf += "initrd /%s\n" % rd 193 boot_conf += "initrd /%s\n" % rd
185 194
186 logger.debug("Writing systemd-boot config " 195 if dtb:
187 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) 196 boot_conf += "devicetree /%s\n" % dtb
188 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") 197
189 cfg.write(boot_conf) 198 if not unified_image:
190 cfg.close() 199 logger.debug("Writing systemd-boot config "
200 "%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
201 cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w")
202 cfg.write(boot_conf)
203 cfg.close()
191 204
192 205
193 @classmethod 206 @classmethod
@@ -207,6 +220,8 @@ class BootimgEFIPlugin(SourcePlugin):
207 cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params) 220 cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params)
208 elif source_params['loader'] == 'systemd-boot': 221 elif source_params['loader'] == 'systemd-boot':
209 cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params) 222 cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params)
223 elif source_params['loader'] == 'uefi-kernel':
224 pass
210 else: 225 else:
211 raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) 226 raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader'])
212 except KeyError: 227 except KeyError:
@@ -288,9 +303,107 @@ class BootimgEFIPlugin(SourcePlugin):
288 kernel = "%s-%s.bin" % \ 303 kernel = "%s-%s.bin" % \
289 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) 304 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
290 305
291 install_cmd = "install -m 0644 %s/%s %s/%s" % \ 306 if source_params.get('create-unified-kernel-image') == "true":
292 (staging_kernel_dir, kernel, hdddir, kernel) 307 initrd = source_params.get('initrd')
293 exec_cmd(install_cmd) 308 if not initrd:
309 raise WicError("initrd= must be specified when create-unified-kernel-image=true, exiting")
310
311 deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
312 efi_stub = glob("%s/%s" % (deploy_dir, "linux*.efi.stub"))
313 if len(efi_stub) == 0:
314 raise WicError("Unified Kernel Image EFI stub not found, exiting")
315 efi_stub = efi_stub[0]
316
317 with tempfile.TemporaryDirectory() as tmp_dir:
318 label = source_params.get('label')
319 label_conf = "root=%s" % creator.rootdev
320 if label:
321 label_conf = "LABEL=%s" % label
322
323 bootloader = creator.ks.bootloader
324 cmdline = open("%s/cmdline" % tmp_dir, "w")
325 cmdline.write("%s %s" % (label_conf, bootloader.append))
326 cmdline.close()
327
328 initrds = initrd.split(';')
329 initrd = open("%s/initrd" % tmp_dir, "wb")
330 for f in initrds:
331 with open("%s/%s" % (deploy_dir, f), 'rb') as in_file:
332 shutil.copyfileobj(in_file, initrd)
333 initrd.close()
334
335 # Searched by systemd-boot:
336 # https://systemd.io/BOOT_LOADER_SPECIFICATION/#type-2-efi-unified-kernel-images
337 install_cmd = "install -d %s/EFI/Linux" % hdddir
338 exec_cmd(install_cmd)
339
340 staging_dir_host = get_bitbake_var("STAGING_DIR_HOST")
341 target_sys = get_bitbake_var("TARGET_SYS")
342
343 objdump_cmd = "%s-objdump" % target_sys
344 objdump_cmd += " -p %s" % efi_stub
345 objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'"
346
347 ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot)
348 align = int(align_str, 16)
349
350 objdump_cmd = "%s-objdump" % target_sys
351 objdump_cmd += " -h %s | tail -2" % efi_stub
352 ret, output = exec_native_cmd(objdump_cmd, native_sysroot)
353
354 offset = int(output.split()[2], 16) + int(output.split()[3], 16)
355
356 osrel_off = offset + align - offset % align
357 osrel_path = "%s/usr/lib/os-release" % staging_dir_host
358 osrel_sz = os.stat(osrel_path).st_size
359
360 cmdline_off = osrel_off + osrel_sz
361 cmdline_off = cmdline_off + align - cmdline_off % align
362 cmdline_sz = os.stat(cmdline.name).st_size
363
364 dtb_off = cmdline_off + cmdline_sz
365 dtb_off = dtb_off + align - dtb_off % align
366
367 dtb = source_params.get('dtb')
368 if dtb:
369 if ';' in dtb:
370 raise WicError("Only one DTB supported, exiting")
371 dtb_path = "%s/%s" % (deploy_dir, dtb)
372 dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \
373 (dtb_path, dtb_off)
374 linux_off = dtb_off + os.stat(dtb_path).st_size
375 linux_off = linux_off + align - linux_off % align
376 else:
377 dtb_params = ''
378 linux_off = dtb_off
379
380 linux_path = "%s/%s" % (staging_kernel_dir, kernel)
381 linux_sz = os.stat(linux_path).st_size
382
383 initrd_off = linux_off + linux_sz
384 initrd_off = initrd_off + align - initrd_off % align
385
386 # https://www.freedesktop.org/software/systemd/man/systemd-stub.html
387 objcopy_cmd = "%s-objcopy" % target_sys
388 objcopy_cmd += " --enable-deterministic-archives"
389 objcopy_cmd += " --preserve-dates"
390 objcopy_cmd += " --add-section .osrel=%s" % osrel_path
391 objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off
392 objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name
393 objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off
394 objcopy_cmd += dtb_params
395 objcopy_cmd += " --add-section .linux=%s" % linux_path
396 objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off
397 objcopy_cmd += " --add-section .initrd=%s" % initrd.name
398 objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off
399 objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir)
400
401 exec_native_cmd(objcopy_cmd, native_sysroot)
402 else:
403 if source_params.get('install-kernel-into-boot-dir') != 'false':
404 install_cmd = "install -m 0644 %s/%s %s/%s" % \
405 (staging_kernel_dir, kernel, hdddir, kernel)
406 exec_cmd(install_cmd)
294 407
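
The unified-kernel-image branch packs the .osrel, .cmdline, .dtb, .linux and .initrd sections back-to-back behind the stub's last section, advancing each offset to the stub's SectionAlignment boundary before passing it to objcopy as the section VMA. A sketch of the round-up arithmetic (align_up is an illustrative name; the patch's `off + align - off % align` variant also lands on an aligned boundary, but always advances by at least one full alignment unit):

    def align_up(offset, align):
        # Smallest multiple of `align` that is >= offset.
        return (offset + align - 1) // align * align

    # next_off = align_up(prev_off + prev_size, align)
    assert align_up(0x5000, 0x1000) == 0x5000
    assert align_up(0x5001, 0x1000) == 0x6000
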
295 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"): 408 if get_bitbake_var("IMAGE_EFI_BOOT_FILES"):
296 for src_path, dst_path in cls.install_task: 409 for src_path, dst_path in cls.install_task:
@@ -312,6 +425,28 @@ class BootimgEFIPlugin(SourcePlugin):
312 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: 425 for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]:
313 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) 426 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:])
314 exec_cmd(cp_cmd, True) 427 exec_cmd(cp_cmd, True)
428 elif source_params['loader'] == 'uefi-kernel':
429 kernel = get_bitbake_var("KERNEL_IMAGETYPE")
430 if not kernel:
431 raise WicError("Empty KERNEL_IMAGETYPE\n")
432 target = get_bitbake_var("TARGET_SYS")
433 if not target:
434 raise WicError("Unknown arch (TARGET_SYS) %s\n" % target)
435
436 if re.match("x86_64", target):
437 kernel_efi_image = "bootx64.efi"
438 elif re.match('i.86', target):
439 kernel_efi_image = "bootia32.efi"
440 elif re.match('aarch64', target):
441 kernel_efi_image = "bootaa64.efi"
442 elif re.match('arm', target):
443 kernel_efi_image = "bootarm.efi"
444 else:
445 raise WicError("UEFI stub kernel is incompatible with target %s" % target)
446
447 for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]:
448 cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image)
449 exec_cmd(cp_cmd, True)
315 else: 450 else:
316 raise WicError("unrecognized bootimg-efi loader: %s" % 451 raise WicError("unrecognized bootimg-efi loader: %s" %
317 source_params['loader']) 452 source_params['loader'])
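
The uefi-kernel loader copies the kernel image to the removable-media fallback path that UEFI firmware probes by default, choosing the file name from the target architecture. A sketch of that mapping on its own (efi_fallback_name is an illustrative name):

    import re

    def efi_fallback_name(target_sys):
        # Same patterns as the branch above; the first match wins.
        for pattern, name in (("x86_64", "bootx64.efi"),
                              ("i.86", "bootia32.efi"),
                              ("aarch64", "bootaa64.efi"),
                              ("arm", "bootarm.efi")):
            if re.match(pattern, target_sys):
                return name
        raise ValueError("UEFI stub kernel is incompatible with target %s" % target_sys)

    assert efi_fallback_name("x86_64-poky-linux") == "bootx64.efi"
    assert efi_fallback_name("i686-poky-linux") == "bootia32.efi"
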
@@ -323,6 +458,11 @@ class BootimgEFIPlugin(SourcePlugin):
323 cp_cmd = "cp %s %s/" % (startup, hdddir) 458 cp_cmd = "cp %s %s/" % (startup, hdddir)
324 exec_cmd(cp_cmd, True) 459 exec_cmd(cp_cmd, True)
325 460
461 for paths in part.include_path or []:
462 for path in paths:
463 cp_cmd = "cp -r %s %s/" % (path, hdddir)
464 exec_cmd(cp_cmd, True)
465
326 du_cmd = "du -bks %s" % hdddir 466 du_cmd = "du -bks %s" % hdddir
327 out = exec_cmd(du_cmd) 467 out = exec_cmd(du_cmd)
328 blocks = int(out.split()[0]) 468 blocks = int(out.split()[0])
@@ -337,6 +477,13 @@ class BootimgEFIPlugin(SourcePlugin):
337 logger.debug("Added %d extra blocks to %s to get to %d total blocks", 477 logger.debug("Added %d extra blocks to %s to get to %d total blocks",
338 extra_blocks, part.mountpoint, blocks) 478 extra_blocks, part.mountpoint, blocks)
339 479
480 # required for compatibility with certain devices expecting file system
481 # block count to be equal to partition block count
482 if blocks < part.fixed_size:
483 blocks = part.fixed_size
484 logger.debug("Overriding %s to %d total blocks for compatibility",
485 part.mountpoint, blocks)
486
340 # dosfs image, created by mkdosfs 487 # dosfs image, created by mkdosfs
341 bootimg = "%s/boot.img" % cr_workdir 488 bootimg = "%s/boot.img" % cr_workdir
342 489
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg-partition.py
index 5dbe2558d2..1071d1af3f 100644
--- a/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# DESCRIPTION 6# DESCRIPTION
@@ -30,6 +32,7 @@ class BootimgPartitionPlugin(SourcePlugin):
30 """ 32 """
31 33
32 name = 'bootimg-partition' 34 name = 'bootimg-partition'
35 image_boot_files_var_name = 'IMAGE_BOOT_FILES'
33 36
34 @classmethod 37 @classmethod
35 def do_configure_partition(cls, part, source_params, cr, cr_workdir, 38 def do_configure_partition(cls, part, source_params, cr, cr_workdir,
@@ -54,12 +57,12 @@ class BootimgPartitionPlugin(SourcePlugin):
54 else: 57 else:
55 var = "" 58 var = ""
56 59
57 boot_files = get_bitbake_var("IMAGE_BOOT_FILES" + var) 60 boot_files = get_bitbake_var(cls.image_boot_files_var_name + var)
58 if boot_files is not None: 61 if boot_files is not None:
59 break 62 break
60 63
61 if boot_files is None: 64 if boot_files is None:
62 raise WicError('No boot files defined, IMAGE_BOOT_FILES unset for entry #%d' % part.lineno) 65 raise WicError('No boot files defined, %s unset for entry #%d' % (cls.image_boot_files_var_name, part.lineno))
63 66
64 logger.debug('Boot files: %s', boot_files) 67 logger.debug('Boot files: %s', boot_files)
65 68
@@ -110,7 +113,7 @@ class BootimgPartitionPlugin(SourcePlugin):
110 # Use a custom configuration for extlinux.conf 113 # Use a custom configuration for extlinux.conf
111 extlinux_conf = custom_cfg 114 extlinux_conf = custom_cfg
112 logger.debug("Using custom configuration file " 115 logger.debug("Using custom configuration file "
113 "%s for extlinux.cfg", configfile) 116 "%s for extlinux.conf", configfile)
114 else: 117 else:
115 raise WicError("configfile is specified but failed to " 118 raise WicError("configfile is specified but failed to "
116 "get it from %s." % configfile) 119 "get it from %s." % configfile)
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index f2639e7004..a207a83530 100644
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -122,7 +122,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
122 syslinux_conf += "DEFAULT boot\n" 122 syslinux_conf += "DEFAULT boot\n"
123 syslinux_conf += "LABEL boot\n" 123 syslinux_conf += "LABEL boot\n"
124 124
125 kernel = "/vmlinuz" 125 kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE")
126 syslinux_conf += "KERNEL " + kernel + "\n" 126 syslinux_conf += "KERNEL " + kernel + "\n"
127 127
128 syslinux_conf += "APPEND label=boot root=%s %s\n" % \ 128 syslinux_conf += "APPEND label=boot root=%s %s\n" % \
@@ -155,8 +155,8 @@ class BootimgPcbiosPlugin(SourcePlugin):
155 kernel = "%s-%s.bin" % \ 155 kernel = "%s-%s.bin" % \
156 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME")) 156 (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
157 157
158 cmds = ("install -m 0644 %s/%s %s/vmlinuz" % 158 cmds = ("install -m 0644 %s/%s %s/%s" %
159 (staging_kernel_dir, kernel, hdddir), 159 (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")),
160 "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % 160 "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" %
161 (bootimg_dir, hdddir), 161 (bootimg_dir, hdddir),
162 "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % 162 "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" %
@@ -186,8 +186,10 @@ class BootimgPcbiosPlugin(SourcePlugin):
186 # dosfs image, created by mkdosfs 186 # dosfs image, created by mkdosfs
187 bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno) 187 bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno)
188 188
189 dosfs_cmd = "mkdosfs -n boot -i %s -S 512 -C %s %d" % \ 189 label = part.label if part.label else "boot"
190 (part.fsuuid, bootimg, blocks) 190
191 dosfs_cmd = "mkdosfs -n %s -i %s -S 512 -C %s %d" % \
192 (label, part.fsuuid, bootimg, blocks)
191 exec_native_cmd(dosfs_cmd, native_sysroot) 193 exec_native_cmd(dosfs_cmd, native_sysroot)
192 194
193 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) 195 mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir)
diff --git a/scripts/lib/wic/plugins/source/empty.py b/scripts/lib/wic/plugins/source/empty.py
index 041617d648..4178912377 100644
--- a/scripts/lib/wic/plugins/source/empty.py
+++ b/scripts/lib/wic/plugins/source/empty.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,9 +9,19 @@
7# To use it you must pass "empty" as argument for the "--source" parameter in 9# To use it you must pass "empty" as argument for the "--source" parameter in
8# the wks file. For example: 10# the wks file. For example:
9# part foo --source empty --ondisk sda --size="1024" --align 1024 11# part foo --source empty --ondisk sda --size="1024" --align 1024
12#
13# The plugin supports writing zeros to the start of the
14# partition. This is useful to overwrite old content like
15# filesystem signatures which may be re-recognized otherwise.
16# This feature can be enabled with
17# '--sourceparams="[fill|size=<N>[S|s|K|k|M|G]][,][bs=<N>[S|s|K|k|M|G]]"'
18# Conflicting or missing options throw errors.
10 19
11import logging 20import logging
21import os
12 22
23from wic import WicError
24from wic.ksparser import sizetype
13from wic.pluginbase import SourcePlugin 25from wic.pluginbase import SourcePlugin
14 26
15logger = logging.getLogger('wic') 27logger = logging.getLogger('wic')
@@ -17,6 +29,16 @@ logger = logging.getLogger('wic')
17class EmptyPartitionPlugin(SourcePlugin): 29class EmptyPartitionPlugin(SourcePlugin):
18 """ 30 """
19 Populate unformatted empty partition. 31 Populate unformatted empty partition.
32
33 The following sourceparams are supported:
34 - fill
35 Fill the entire partition with zeros. Requires '--fixed-size' option
36 to be set.
37 - size=<N>[S|s|K|k|M|G]
38 Set the first N bytes of the partition to zero. Default unit is 'K'.
39 - bs=<N>[S|s|K|k|M|G]
40 Write at most N bytes at a time during source file creation.
41 Defaults to '1M'. Default unit is 'K'.
20 """ 42 """
21 43
22 name = 'empty' 44 name = 'empty'
@@ -29,4 +51,39 @@ class EmptyPartitionPlugin(SourcePlugin):
29 Called to do the actual content population for a partition i.e. it 51 Called to do the actual content population for a partition i.e. it
30 'prepares' the partition to be incorporated into the image. 52 'prepares' the partition to be incorporated into the image.
31 """ 53 """
32 return 54 get_byte_count = sizetype('K', True)
55 size = 0
56
57 if 'fill' in source_params and 'size' in source_params:
58 raise WicError("Conflicting source parameters 'fill' and 'size' specified, exiting.")
59
60 # Set the size of the zeros to be written to the partition
61 if 'fill' in source_params:
62 if part.fixed_size == 0:
63 raise WicError("Source parameter 'fill' only works with the '--fixed-size' option, exiting.")
64 size = get_byte_count(part.fixed_size)
65 elif 'size' in source_params:
66 size = get_byte_count(source_params['size'])
67
68 if size == 0:
69 # Nothing to do, create empty partition
70 return
71
72 if 'bs' in source_params:
73 bs = get_byte_count(source_params['bs'])
74 else:
75 bs = get_byte_count('1M')
76
77 # Create a binary file of the requested size filled with zeros
78 source_file = os.path.join(cr_workdir, 'empty-plugin-zeros%s.bin' % part.lineno)
79 if not os.path.exists(os.path.dirname(source_file)):
80 os.makedirs(os.path.dirname(source_file))
81
82 quotient, remainder = divmod(size, bs)
83 with open(source_file, 'wb') as file:
84 for _ in range(quotient):
85 file.write(bytearray(bs))
86 file.write(bytearray(remainder))
87
88 part.size = (size + 1024 - 1) // 1024 # size in KB rounded up
89 part.source_file = source_file
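
The fill path writes the requested number of zero bytes in bs-sized blocks so large partitions never need one giant buffer; divmod splits the size into full blocks plus a remainder. A standalone sketch of the writer (write_zeros is an illustrative name):

    def write_zeros(path, size, bs=1024 * 1024):
        # bytearray(n) is n zero bytes; the final short write covers the
        # remainder when size is not a multiple of bs.
        full_blocks, remainder = divmod(size, bs)
        with open(path, 'wb') as f:
            for _ in range(full_blocks):
                f.write(bytearray(bs))
            f.write(bytearray(remainder))

A wks entry exercising the fill option might look like this (illustrative):

    # part foo --source empty --ondisk sda --fixed-size 2048 --sourceparams="fill"
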
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
index afc9ea0f8f..607356ad13 100644
--- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# DESCRIPTION 6# DESCRIPTION
diff --git a/scripts/lib/wic/plugins/source/rawcopy.py b/scripts/lib/wic/plugins/source/rawcopy.py
index 3c4997d8ba..21903c2f23 100644
--- a/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/scripts/lib/wic/plugins/source/rawcopy.py
@@ -1,9 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import logging 7import logging
6import os 8import os
9import signal
10import subprocess
7 11
8from wic import WicError 12from wic import WicError
9from wic.pluginbase import SourcePlugin 13from wic.pluginbase import SourcePlugin
@@ -21,6 +25,10 @@ class RawCopyPlugin(SourcePlugin):
21 25
22 @staticmethod 26 @staticmethod
23 def do_image_label(fstype, dst, label): 27 def do_image_label(fstype, dst, label):
28 # don't create label when fstype is none
29 if fstype == 'none':
30 return
31
24 if fstype.startswith('ext'): 32 if fstype.startswith('ext'):
25 cmd = 'tune2fs -L %s %s' % (label, dst) 33 cmd = 'tune2fs -L %s %s' % (label, dst)
26 elif fstype in ('msdos', 'vfat'): 34 elif fstype in ('msdos', 'vfat'):
@@ -29,15 +37,35 @@ class RawCopyPlugin(SourcePlugin):
29 cmd = 'btrfs filesystem label %s %s' % (dst, label) 37 cmd = 'btrfs filesystem label %s %s' % (dst, label)
30 elif fstype == 'swap': 38 elif fstype == 'swap':
31 cmd = 'mkswap -L %s %s' % (label, dst) 39 cmd = 'mkswap -L %s %s' % (label, dst)
32 elif fstype == 'squashfs': 40 elif fstype in ('squashfs', 'erofs'):
33 raise WicError("It's not possible to update a squashfs " 41 raise WicError("It's not possible to update a %s "
34 "filesystem label '%s'" % (label)) 42 "filesystem label '%s'" % (fstype, label))
35 else: 43 else:
36 raise WicError("Cannot update filesystem label: " 44 raise WicError("Cannot update filesystem label: "
37 "Unknown fstype: '%s'" % (fstype)) 45 "Unknown fstype: '%s'" % (fstype))
38 46
39 exec_cmd(cmd) 47 exec_cmd(cmd)
40 48
49 @staticmethod
50 def do_image_uncompression(src, dst, workdir):
51 def subprocess_setup():
52 # Python installs a SIGPIPE handler by default. This is usually not what
53 # non-Python subprocesses expect.
54 # SIGPIPE errors are known issues with gzip/bash
55 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
56
57 extension = os.path.splitext(src)[1]
58 decompressor = {
59 ".bz2": "bzip2",
60 ".gz": "gzip",
61 ".xz": "xz",
62 ".zst": "zstd -f",
63 }.get(extension)
64 if not decompressor:
65 raise WicError("Unsupported compressor filename extension: %s" % extension)
66 cmd = "%s -dc %s > %s" % (decompressor, src, dst)
67 subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=workdir)
68
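
do_image_uncompression picks the decompressor from the file extension and streams the output through a shell redirect; restoring the default SIGPIPE handler matters because Python ignores SIGPIPE by default and gzip-style tools expect to die on it. A condensed sketch (decompress is an illustrative name):

    import os
    import signal
    import subprocess

    def decompress(src, dst, workdir):
        decompressor = {
            ".bz2": "bzip2",
            ".gz": "gzip",
            ".xz": "xz",
            ".zst": "zstd -f",
        }.get(os.path.splitext(src)[1])
        if not decompressor:
            raise ValueError("Unsupported compressor filename extension: %s" % src)
        # Reset SIGPIPE to the default disposition in the child only.
        subprocess.call("%s -dc %s > %s" % (decompressor, src, dst),
                        preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL),
                        shell=True, cwd=workdir)
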
41 @classmethod 69 @classmethod
42 def do_prepare_partition(cls, part, source_params, cr, cr_workdir, 70 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
43 oe_builddir, bootimg_dir, kernel_dir, 71 oe_builddir, bootimg_dir, kernel_dir,
@@ -56,7 +84,13 @@ class RawCopyPlugin(SourcePlugin):
56 if 'file' not in source_params: 84 if 'file' not in source_params:
57 raise WicError("No file specified") 85 raise WicError("No file specified")
58 86
59 src = os.path.join(kernel_dir, source_params['file']) 87 if 'unpack' in source_params:
88 img = os.path.join(kernel_dir, source_params['file'])
89 src = os.path.join(cr_workdir, os.path.splitext(source_params['file'])[0])
90 RawCopyPlugin.do_image_uncompression(img, src, cr_workdir)
91 else:
92 src = os.path.join(kernel_dir, source_params['file'])
93
60 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno)) 94 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno))
61 95
62 if not os.path.exists(os.path.dirname(dst)): 96 if not os.path.exists(os.path.dirname(dst)):
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
index 96d940a91d..c990143c0d 100644
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ b/scripts/lib/wic/plugins/source/rootfs.py
@@ -35,7 +35,7 @@ class RootfsPlugin(SourcePlugin):
35 @staticmethod 35 @staticmethod
36 def __validate_path(cmd, rootfs_dir, path): 36 def __validate_path(cmd, rootfs_dir, path):
37 if os.path.isabs(path): 37 if os.path.isabs(path):
38 logger.error("%s: Must be relative: %s" % (cmd, orig_path)) 38 logger.error("%s: Must be relative: %s" % (cmd, path))
39 sys.exit(1) 39 sys.exit(1)
40 40
41 # Disallow climbing outside of parent directory using '..', 41 # Disallow climbing outside of parent directory using '..',
@@ -43,14 +43,14 @@ class RootfsPlugin(SourcePlugin):
43 # directory, or modify a directory outside OpenEmbedded). 43 # directory, or modify a directory outside OpenEmbedded).
44 full_path = os.path.realpath(os.path.join(rootfs_dir, path)) 44 full_path = os.path.realpath(os.path.join(rootfs_dir, path))
45 if not full_path.startswith(os.path.realpath(rootfs_dir)): 45 if not full_path.startswith(os.path.realpath(rootfs_dir)):
46 logger.error("%s: Must point inside the rootfs:" % (cmd, path)) 46 logger.error("%s: Must point inside the rootfs: %s" % (cmd, path))
47 sys.exit(1) 47 sys.exit(1)
48 48
49 return full_path 49 return full_path
50 50
51 @staticmethod 51 @staticmethod
52 def __get_rootfs_dir(rootfs_dir): 52 def __get_rootfs_dir(rootfs_dir):
53 if os.path.isdir(rootfs_dir): 53 if rootfs_dir and os.path.isdir(rootfs_dir):
54 return os.path.realpath(rootfs_dir) 54 return os.path.realpath(rootfs_dir)
55 55
56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) 56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir)
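
__validate_path combines two guards: reject absolute paths outright, then resolve the joined path with realpath and require it to stay under the rootfs, which blocks '..' climbs and symlink escapes. A distilled sketch, with a slightly stricter prefix check that also rules out sibling directories sharing the rootfs name as a prefix (safe_join is an illustrative name):

    import os

    def safe_join(rootfs_dir, path):
        if os.path.isabs(path):
            raise ValueError("Must be relative: %s" % path)
        root = os.path.realpath(rootfs_dir)
        full_path = os.path.realpath(os.path.join(root, path))
        # startswith(root) alone would accept '/rootfs-other'; adding the
        # separator confines matches to the tree itself.
        if full_path != root and not full_path.startswith(root + os.sep):
            raise ValueError("Must point inside the rootfs: %s" % path)
        return full_path
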
@@ -97,6 +97,9 @@ class RootfsPlugin(SourcePlugin):
97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab")) 97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab"))
98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo") 98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo")
99 if not os.path.lexists(pseudo_dir): 99 if not os.path.lexists(pseudo_dir):
100 pseudo_dir = os.path.join(cls.__get_rootfs_dir(None), '../pseudo')
101
102 if not os.path.lexists(pseudo_dir):
100 logger.warn("%s folder does not exist. " 103 logger.warn("%s folder does not exist. "
101 "Usernames and permissions will be invalid " % pseudo_dir) 104 "Usernames and permissions will be invalid " % pseudo_dir)
102 pseudo_dir = None 105 pseudo_dir = None
@@ -218,10 +221,10 @@ class RootfsPlugin(SourcePlugin):
218 # Update part.has_fstab here as fstab may have been added or 221 # Update part.has_fstab here as fstab may have been added or
219 # removed by the above modifications. 222 # removed by the above modifications.
220 part.has_fstab = os.path.exists(os.path.join(new_rootfs, "etc/fstab")) 223 part.has_fstab = os.path.exists(os.path.join(new_rootfs, "etc/fstab"))
221 if part.update_fstab_in_rootfs and part.has_fstab: 224 if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update:
222 fstab_path = os.path.join(new_rootfs, "etc/fstab") 225 fstab_path = os.path.join(new_rootfs, "etc/fstab")
223 # Assume that fstab should always be owned by root with fixed permissions 226 # Assume that fstab should always be owned by root with fixed permissions
224 install_cmd = "install -m 0644 %s %s" % (part.updated_fstab_path, fstab_path) 227 install_cmd = "install -m 0644 -p %s %s" % (part.updated_fstab_path, fstab_path)
225 if new_pseudo: 228 if new_pseudo:
226 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo) 229 pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
227 else: 230 else:
diff --git a/scripts/lnr b/scripts/lnr
deleted file mode 100755
index a2ac4fec0f..0000000000
--- a/scripts/lnr
+++ /dev/null
@@ -1,24 +0,0 @@
1#! /usr/bin/env python3
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5
6# Create a *relative* symlink, just like ln --relative does but without needing
7# coreutils 8.16.
8
9import sys, os
10
11if len(sys.argv) != 3:
12 print("$ lnr TARGET LINK_NAME")
13 sys.exit(1)
14
15target = sys.argv[1]
16linkname = sys.argv[2]
17
18if os.path.isabs(target):
19 if not os.path.isabs(linkname):
20 linkname = os.path.abspath(linkname)
21 start = os.path.dirname(linkname)
22 target = os.path.relpath(target, start)
23
24os.symlink(target, linkname)
diff --git a/scripts/native-intercept/ar b/scripts/native-intercept/ar
new file mode 100755
index 0000000000..dcc623e3ed
--- /dev/null
+++ b/scripts/native-intercept/ar
@@ -0,0 +1,32 @@
1#!/usr/bin/env python3
2#
3# Wrapper around 'ar' that defaults to deterministic archives
4
5import os
6import shutil
7import sys
8
9# calculate path to the real 'ar'
10path = os.environ['PATH']
11path = path.replace(os.path.dirname(sys.argv[0]), '')
12real_ar = shutil.which('ar', path=path)
13
14if len(sys.argv) == 1:
15 os.execl(real_ar, 'ar')
16
17# modify args to mimic 'ar' configured with --default-deterministic-archives
18argv = sys.argv
19if argv[1].startswith('--'):
20 # No modifier given
21 pass
22else:
23 # remove the optional '-'
24 if argv[1][0] == '-':
25 argv[1] = argv[1][1:]
26 if 'U' in argv[1]:
27 sys.stderr.write("ar: non-deterministic mode requested\n")
28 else:
29 argv[1] = argv[1].replace('u', '')
30 argv[1] = 'D' + argv[1]
31
32os.execv(real_ar, argv)
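
The wrapper only rewrites the first operand: a leading '-' is stripped, 'u' (update, timestamp-dependent) is dropped, and 'D' (deterministic) is prepended, so for example 'rcs' becomes 'Drcs'; an explicit 'U' is respected and merely warned about. A sketch of the flag munging alone (force_deterministic is an illustrative name):

    def force_deterministic(modifiers):
        if modifiers.startswith('--'):
            return modifiers              # long option, pass through
        if modifiers.startswith('-'):
            modifiers = modifiers[1:]     # the dash is optional for ar
        if 'U' in modifiers:
            return modifiers              # caller insisted on non-deterministic
        return 'D' + modifiers.replace('u', '')

    assert force_deterministic('rcs') == 'Drcs'
    assert force_deterministic('-rcu') == 'Drc'
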
diff --git a/scripts/nativesdk-intercept/chgrp b/scripts/nativesdk-intercept/chgrp
new file mode 100755
index 0000000000..f8ae84b8b3
--- /dev/null
+++ b/scripts/nativesdk-intercept/chgrp
@@ -0,0 +1,30 @@
1#!/usr/bin/env python3
2#
3# Wrapper around 'chgrp' that redirects to root in all cases
4
5import os
6import shutil
7import sys
8
9# calculate path to the real 'chgrp'
10path = os.environ['PATH']
11path = path.replace(os.path.dirname(sys.argv[0]), '')
12real_chgrp = shutil.which('chgrp', path=path)
13
14args = list()
15
16found = False
17
18args.append(real_chgrp)
19
20for i in sys.argv[1:]:
21 if i.startswith("-"):
22 args.append(i)
23 continue
24 if not found:
25 args.append("root")
26 found = True
27 else:
28 args.append(i)
29
30os.execv(real_chgrp, args)
diff --git a/scripts/nativesdk-intercept/chown b/scripts/nativesdk-intercept/chown
new file mode 100755
index 0000000000..0805ceb70a
--- /dev/null
+++ b/scripts/nativesdk-intercept/chown
@@ -0,0 +1,30 @@
1#!/usr/bin/env python3
2#
3# Wrapper around 'chown' that redirects to root in all cases
4
5import os
6import shutil
7import sys
8
9# calculate path to the real 'chown'
10path = os.environ['PATH']
11path = path.replace(os.path.dirname(sys.argv[0]), '')
12real_chown = shutil.which('chown', path=path)
13
14args = list()
15
16found = False
17
18args.append(real_chown)
19
20for i in sys.argv[1:]:
21 if i.startswith("-"):
22 args.append(i)
23 continue
24 if not found:
25 args.append("root:root")
26 found = True
27 else:
28 args.append(i)
29
30os.execv(real_chown, args)
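
The chgrp and chown intercepts share one pattern: options are passed through unchanged and the first non-option argument (the group or owner spec) is replaced with root. A sketch of the shared rewrite, factored into a helper (rewrite_args is an illustrative name, not part of the patch):

    def rewrite_args(argv, replacement):
        out, replaced = [], False
        for arg in argv:
            if arg.startswith('-') or replaced:
                out.append(arg)
            else:
                out.append(replacement)   # swap in 'root' / 'root:root'
                replaced = True
        return out

    assert rewrite_args(['-R', 'user:user', 'rootfs'], 'root:root') == \
           ['-R', 'root:root', 'rootfs']
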
diff --git a/scripts/oe-buildenv-internal b/scripts/oe-buildenv-internal
index ba0a9b44d6..2fdb19565a 100755
--- a/scripts/oe-buildenv-internal
+++ b/scripts/oe-buildenv-internal
@@ -32,12 +32,12 @@ fi
32# We potentially have code that doesn't parse correctly with older versions 32# We potentially have code that doesn't parse correctly with older versions
33# of Python, and rather than fixing that and being eternally vigilant for 33# of Python, and rather than fixing that and being eternally vigilant for
34# any other new feature use, just check the version here. 34# any other new feature use, just check the version here.
35py_v35_check=$(python3 -c 'import sys; print(sys.version_info >= (3,5,0))') 35py_v38_check=$(python3 -c 'import sys; print(sys.version_info >= (3,8,0))')
36if [ "$py_v35_check" != "True" ]; then 36if [ "$py_v38_check" != "True" ]; then
37 echo >&2 "BitBake requires Python 3.5.0 or later as 'python3 (scripts/install-buildtools can be used if needed)'" 37 echo >&2 "BitBake requires Python 3.8.0 or later as 'python3' (scripts/install-buildtools can be used if needed)"
38 return 1 38 return 1
39fi 39fi
40unset py_v35_check 40unset py_v38_check
41 41
42if [ -z "$BDIR" ]; then 42if [ -z "$BDIR" ]; then
43 if [ -z "$1" ]; then 43 if [ -z "$1" ]; then
@@ -88,27 +88,32 @@ if [ ! -d "$BITBAKEDIR" ]; then
88 return 1 88 return 1
89fi 89fi
90 90
91# Add BitBake's library to PYTHONPATH
92PYTHONPATH=$BITBAKEDIR/lib:$PYTHONPATH
93export PYTHONPATH
94
95# Remove any paths added by sourcing this script before
96[ -n "$OE_ADDED_PATHS" ] && PATH=$(echo $PATH | sed -e "s#$OE_ADDED_PATHS##") ||
97 PATH=$(echo $PATH | sed -e "s#$OEROOT/scripts:$BITBAKEDIR/bin:##")
98
91# Make sure our paths are at the beginning of $PATH 99# Make sure our paths are at the beginning of $PATH
92for newpath in "$BITBAKEDIR/bin" "$OEROOT/scripts"; do 100OE_ADDED_PATHS="$OEROOT/scripts:$BITBAKEDIR/bin:"
93 # Remove any existences of $newpath from $PATH 101PATH="$OE_ADDED_PATHS$PATH"
94 PATH=$(echo $PATH | sed -re "s#(^|:)$newpath(:|$)#\2#g;s#^:##") 102export OE_ADDED_PATHS
95 103
96 # Add $newpath to $PATH 104# This is not needed anymore
97 PATH="$newpath:$PATH" 105unset BITBAKEDIR
98done
99unset BITBAKEDIR newpath
100 106
101# Used by the runqemu script 107# Used by the runqemu script
102export BUILDDIR 108export BUILDDIR
103export PATH
104 109
105BB_ENV_EXTRAWHITE_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \ 110BB_ENV_PASSTHROUGH_ADDITIONS_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \
106HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \ 111HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \
107all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \ 112all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \
108SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \ 113SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \
109SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \ 114SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \
110BB_LOGCONFIG" 115BB_LOGCONFIG"
111 116
112BB_ENV_EXTRAWHITE="$(echo $BB_ENV_EXTRAWHITE $BB_ENV_EXTRAWHITE_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')" 117BB_ENV_PASSTHROUGH_ADDITIONS="$(echo $BB_ENV_PASSTHROUGH_ADDITIONS $BB_ENV_PASSTHROUGH_ADDITIONS_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')"
113 118
114export BB_ENV_EXTRAWHITE 119export BB_ENV_PASSTHROUGH_ADDITIONS
diff --git a/scripts/oe-check-sstate b/scripts/oe-check-sstate
index ca249ca67b..0d171c4463 100755
--- a/scripts/oe-check-sstate
+++ b/scripts/oe-check-sstate
@@ -18,7 +18,6 @@ import re
18scripts_path = os.path.dirname(os.path.realpath(__file__)) 18scripts_path = os.path.dirname(os.path.realpath(__file__))
19lib_path = scripts_path + '/lib' 19lib_path = scripts_path + '/lib'
20sys.path = sys.path + [lib_path] 20sys.path = sys.path + [lib_path]
21import scriptutils
22import scriptpath 21import scriptpath
23scriptpath.add_bitbake_lib_path() 22scriptpath.add_bitbake_lib_path()
24import argparse_oe 23import argparse_oe
@@ -47,17 +46,14 @@ def check(args):
47 try: 46 try:
48 env = os.environ.copy() 47 env = os.environ.copy()
49 if not args.same_tmpdir: 48 if not args.same_tmpdir:
50 env['BB_ENV_EXTRAWHITE'] = env.get('BB_ENV_EXTRAWHITE', '') + ' TMPDIR_forcevariable' 49 env['BB_ENV_PASSTHROUGH_ADDITIONS'] = env.get('BB_ENV_PASSTHROUGH_ADDITIONS', '') + ' TMPDIR:forcevariable'
51 env['TMPDIR_forcevariable'] = tmpdir 50 env['TMPDIR:forcevariable'] = tmpdir
52 51
53 try: 52 try:
54 output = subprocess.check_output( 53 cmd = ['bitbake', '--dry-run', '--runall=build'] + args.target
55 'bitbake -n %s' % ' '.join(args.target), 54 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, env=env)
56 stderr=subprocess.STDOUT,
57 env=env,
58 shell=True)
59 55
60 task_re = re.compile('NOTE: Running setscene task [0-9]+ of [0-9]+ \(([^)]+)\)') 56 task_re = re.compile(r'NOTE: Running setscene task [0-9]+ of [0-9]+ \(([^)]+)\)')
61 tasks = [] 57 tasks = []
62 for line in output.decode('utf-8').splitlines(): 58 for line in output.decode('utf-8').splitlines():
63 res = task_re.match(line) 59 res = task_re.match(line)
diff --git a/scripts/oe-debuginfod b/scripts/oe-debuginfod
index 967dd5807c..5e70d37b8b 100755
--- a/scripts/oe-debuginfod
+++ b/scripts/oe-debuginfod
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: MIT 5# SPDX-License-Identifier: MIT
4# 6#
5 7
@@ -13,19 +15,29 @@ scriptpath.add_bitbake_lib_path()
13 15
14import bb.tinfoil 16import bb.tinfoil
15import subprocess 17import subprocess
18import argparse
16 19
17if __name__ == "__main__": 20if __name__ == "__main__":
21 p = argparse.ArgumentParser()
22 p.add_argument("-d", action='store_true', \
23 help="store debuginfod files in project sub-directory")
24
25 args = p.parse_args()
26
18 with bb.tinfoil.Tinfoil() as tinfoil: 27 with bb.tinfoil.Tinfoil() as tinfoil:
19 tinfoil.prepare(config_only=True) 28 tinfoil.prepare(config_only=True)
20 package_classes_var = "DEPLOY_DIR_" + tinfoil.config_data.getVar("PACKAGE_CLASSES").split()[0].replace("package_", "").upper() 29 package_classes_var = "DEPLOY_DIR_" + tinfoil.config_data.getVar("PACKAGE_CLASSES").split()[0].replace("package_", "").upper()
21 feed_dir = tinfoil.config_data.getVar(package_classes_var, expand=True) 30 feed_dir = tinfoil.config_data.getVar(package_classes_var, expand=True)
22 31
23 try: 32 opts = [ '--verbose', '-R', '-U', feed_dir ]
24 if package_classes_var == "DEPLOY_DIR_RPM": 33
25 subprocess.check_output(subprocess.run(['oe-run-native', 'elfutils-native', 'debuginfod', '--verbose', '-R', feed_dir])) 34 if args.d:
26 else: 35 fdir = os.path.join(os.getcwd(), 'oedid-files')
27 subprocess.check_output(subprocess.run(['oe-run-native', 'elfutils-native', 'debuginfod', '--verbose', '-U', feed_dir])) 36 os.makedirs(fdir, exist_ok=True)
28 except subprocess.CalledProcessError: 37 opts += [ '-d', os.path.join(fdir, 'did.sqlite') ]
29 print("\nTo use the debuginfod server Please ensure that this variable PACKAGECONFIG_pn-elfutils-native = \"debuginfod libdebuginfod\" is set in the local.conf") 38
30 except KeyboardInterrupt: 39 subprocess.call(['bitbake', '-c', 'addto_recipe_sysroot', 'elfutils-native'])
31 sys.exit(1) 40
41 subprocess.call(['oe-run-native', 'elfutils-native', 'debuginfod'] + opts)
42 # we should not get here
43 print("\nTo use the debuginfod server, please ensure that PACKAGECONFIG:pn-elfutils-native = \"debuginfod libdebuginfod\" is set in local.conf")
diff --git a/scripts/oe-depends-dot b/scripts/oe-depends-dot
index 5eb3e12769..d02ee455f6 100755
--- a/scripts/oe-depends-dot
+++ b/scripts/oe-depends-dot
@@ -14,8 +14,8 @@ import re
14class Dot(object): 14class Dot(object):
15 def __init__(self): 15 def __init__(self):
16 parser = argparse.ArgumentParser( 16 parser = argparse.ArgumentParser(
17 description="Analyse recipe-depends.dot generated by bitbake -g", 17 description="Analyse task-depends.dot generated by bitbake -g",
18 epilog="Use %(prog)s --help to get help") 18 formatter_class=argparse.RawDescriptionHelpFormatter)
19 parser.add_argument("dotfile", 19 parser.add_argument("dotfile",
20 help = "Specify the dotfile", nargs = 1, action='store', default='') 20 help = "Specify the dotfile", nargs = 1, action='store', default='')
21 parser.add_argument("-k", "--key", 21 parser.add_argument("-k", "--key",
@@ -32,6 +32,21 @@ class Dot(object):
32 " For example, A->B, B->C, A->C, then A->C can be removed.", 32 " For example, A->B, B->C, A->C, then A->C can be removed.",
33 action="store_true", default=False) 33 action="store_true", default=False)
34 34
35 parser.epilog = """
36Examples:
37First generate the .dot file:
38 bitbake -g core-image-minimal
39
40To find out why a package is being built:
41 %(prog)s -k <package> -w ./task-depends.dot
42
43To find out what a package depends on:
44 %(prog)s -k <package> -d ./task-depends.dot
45
46Reduce the .dot file packages only, no tasks:
47 %(prog)s -r ./task-depends.dot
48"""
49
35 self.args = parser.parse_args() 50 self.args = parser.parse_args()
36 51
37 if len(sys.argv) != 3 and len(sys.argv) < 5: 52 if len(sys.argv) != 3 and len(sys.argv) < 5:
@@ -99,6 +114,10 @@ class Dot(object):
99 if key == "meta-world-pkgdata": 114 if key == "meta-world-pkgdata":
100 continue 115 continue
101 dep = m.group(2) 116 dep = m.group(2)
117 key = key.split('.')[0]
118 dep = dep.split('.')[0]
119 if key == dep:
120 continue
102 if key in depends: 121 if key in depends:
103 if not key in depends[key]: 122 if not key in depends[key]:
104 depends[key].add(dep) 123 depends[key].add(dep)
@@ -140,9 +159,14 @@ class Dot(object):
140 159
141 reverse_deps = [] 160 reverse_deps = []
142 if self.args.why: 161 if self.args.why:
143 for k, v in depends.items(): 162 key_list = [self.args.key]
144 if self.args.key in v and not k in reverse_deps: 163 current_key = self.args.key
145 reverse_deps.append(k) 164 while (len(key_list) != 0):
165 current_key = key_list.pop()
166 for k, v in depends.items():
167 if current_key in v and not k in reverse_deps:
168 reverse_deps.append(k)
169 key_list.append(k)
146 print('Because: %s' % ' '.join(reverse_deps)) 170 print('Because: %s' % ' '.join(reverse_deps))
147 Dot.print_dep_chains(self.args.key, reverse_deps, depends) 171 Dot.print_dep_chains(self.args.key, reverse_deps, depends)
148 172
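
The --why change turns a single reverse-dependency pass into a work-list traversal, so the printed chain covers transitive dependents of the key, not just direct ones. A standalone sketch of the traversal over a package-to-dependencies mapping (reverse_closure is an illustrative name):

    def reverse_closure(depends, key):
        # Collect everything that directly or indirectly depends on key.
        found, worklist = [], [key]
        while worklist:
            current = worklist.pop()
            for pkg, deps in depends.items():
                if current in deps and pkg not in found:
                    found.append(pkg)
                    worklist.append(pkg)
        return found

    deps = {'image': {'app'}, 'app': {'libfoo'}, 'libfoo': set()}
    assert reverse_closure(deps, 'libfoo') == ['app', 'image']
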
diff --git a/scripts/oe-find-native-sysroot b/scripts/oe-find-native-sysroot
index 5146bbf999..6228efcbee 100755
--- a/scripts/oe-find-native-sysroot
+++ b/scripts/oe-find-native-sysroot
@@ -36,20 +36,9 @@ if [ "$1" = '--help' -o "$1" = '-h' -o $# -ne 1 ] ; then
36fi 36fi
37 37
38# Global vars 38# Global vars
39BITBAKE_E=""
40set_oe_native_sysroot(){ 39set_oe_native_sysroot(){
41 echo "Running bitbake -e $1" 40 echo "Getting sysroot..."
42 BITBAKE_E="`bitbake -e $1`" 41 OECORE_NATIVE_SYSROOT=$(bitbake-getvar -r $1 --value STAGING_DIR_NATIVE)
43 OECORE_NATIVE_SYSROOT=`echo "$BITBAKE_E" | grep ^STAGING_DIR_NATIVE= | cut -d '"' -f2`
44
45 if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
46 # This indicates that there was an error running bitbake -e that
47 # the user needs to be informed of
48 echo "There was an error running bitbake to determine STAGING_DIR_NATIVE"
49 echo "Here is the output from bitbake -e $1"
50 echo $BITBAKE_E
51 exit 1
52 fi
53} 42}
54 43
55if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then 44if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
diff --git a/scripts/oe-gnome-terminal-phonehome b/scripts/oe-gnome-terminal-phonehome
index b6b9a3867b..1352a9872b 100755
--- a/scripts/oe-gnome-terminal-phonehome
+++ b/scripts/oe-gnome-terminal-phonehome
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Gnome terminal won't tell us which PID a given command is run as 7# Gnome terminal won't tell us which PID a given command is run as
diff --git a/scripts/oe-pkgdata-browser b/scripts/oe-pkgdata-browser
index 8d223185a4..c152c82b25 100755
--- a/scripts/oe-pkgdata-browser
+++ b/scripts/oe-pkgdata-browser
@@ -1,4 +1,9 @@
1#! /usr/bin/env python3 1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
2 7
3import os, sys, enum, ast 8import os, sys, enum, ast
4 9
@@ -49,11 +54,11 @@ def load(filename, suffix=None):
49 from configparser import ConfigParser 54 from configparser import ConfigParser
50 from itertools import chain 55 from itertools import chain
51 56
52 parser = ConfigParser() 57 parser = ConfigParser(delimiters=('='))
53 if suffix: 58 if suffix:
54 parser.optionxform = lambda option: option.replace("_" + suffix, "") 59 parser.optionxform = lambda option: option.replace(":" + suffix, "")
55 with open(filename) as lines: 60 with open(filename) as lines:
56 lines = chain(("[fake]",), lines) 61 lines = chain(("[fake]",), (line.replace(": ", " = ", 1) for line in lines))
57 parser.read_file(lines) 62 parser.read_file(lines)
58 63
59 # TODO extract the data and put it into a real dict so we can transform some 64 # TODO extract the data and put it into a real dict so we can transform some
@@ -236,6 +241,8 @@ class PkgUi():
236 update_deps("RPROVIDES", "Provides: ", self.provides_label, clickable=False) 241 update_deps("RPROVIDES", "Provides: ", self.provides_label, clickable=False)
237 242
238 def load_recipes(self): 243 def load_recipes(self):
244 if not os.path.exists(pkgdata):
245 sys.exit("Error: Please ensure %s exists by generating packages before using this tool." % pkgdata)
239 for recipe in sorted(os.listdir(pkgdata)): 246 for recipe in sorted(os.listdir(pkgdata)):
240 if os.path.isfile(os.path.join(pkgdata, recipe)): 247 if os.path.isfile(os.path.join(pkgdata, recipe)):
241 self.recipe_iters[recipe] = self.recipe_store.append([recipe]) 248 self.recipe_iters[recipe] = self.recipe_store.append([recipe])
diff --git a/scripts/oe-pkgdata-util b/scripts/oe-pkgdata-util
index 75dd23efa3..44ae40549a 100755
--- a/scripts/oe-pkgdata-util
+++ b/scripts/oe-pkgdata-util
@@ -96,7 +96,7 @@ def glob(args):
96 pn = os.path.basename(pkgdata_file) 96 pn = os.path.basename(pkgdata_file)
97 with open(pkgdata_file, 'r') as f: 97 with open(pkgdata_file, 'r') as f:
98 for line in f: 98 for line in f:
99 if line.startswith("PKG_%s:" % pn): 99 if line.startswith("PKG:%s:" % pn):
100 renamed = line.split(': ')[1].rstrip() 100 renamed = line.split(': ')[1].rstrip()
101 return renamed 101 return renamed
102 102
@@ -171,7 +171,7 @@ def read_value(args):
171 val = line.split(': ', 1)[1].rstrip() 171 val = line.split(': ', 1)[1].rstrip()
172 return val 172 return val
173 173
174 logger.debug("read-value('%s', '%s' '%s')" % (args.pkgdata_dir, args.valuename, packages)) 174 logger.debug("read-value('%s', '%s' '%s')" % (args.pkgdata_dir, args.valuenames, packages))
175 for package in packages: 175 for package in packages:
176 pkg_split = package.split('_') 176 pkg_split = package.split('_')
177 pkg_name = pkg_split[0] 177 pkg_name = pkg_split[0]
@@ -180,20 +180,29 @@ def read_value(args):
180 logger.debug(revlink) 180 logger.debug(revlink)
181 if os.path.exists(revlink): 181 if os.path.exists(revlink):
182 mappedpkg = os.path.basename(os.readlink(revlink)) 182 mappedpkg = os.path.basename(os.readlink(revlink))
183 qvar = args.valuename 183 qvars = args.valuenames
184 value = readvar(revlink, qvar, mappedpkg) 184 val_names = qvars.split(',')
185 if qvar == "PKGSIZE": 185 values = []
186 # PKGSIZE is now in bytes, but we want it in KB 186 for qvar in val_names:
187 pkgsize = (int(value) + 1024 // 2) // 1024 187 if qvar == "PACKAGE":
188 value = "%d" % pkgsize 188 value = mappedpkg
189 if args.unescape: 189 else:
190 import codecs 190 value = readvar(revlink, qvar, mappedpkg)
191 # escape_decode() unescapes backslash encodings in byte streams 191 if qvar == "PKGSIZE":
192 value = codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") 192 # PKGSIZE is now in bytes, but we want it in KB
193 pkgsize = (int(value) + 1024 // 2) // 1024
194 value = "%d" % pkgsize
195 if args.unescape:
196 import codecs
197 # escape_decode() unescapes backslash encodings in byte streams
198 value = codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8")
199 values.append(value)
200
201 values_str = ' '.join(values)
193 if args.prefix_name: 202 if args.prefix_name:
194 print('%s %s' % (pkg_name, value)) 203 print('%s %s' % (pkg_name, values_str))
195 else: 204 else:
196 print(value) 205 print(values_str)
197 else: 206 else:
198 logger.debug("revlink %s does not exist", revlink) 207 logger.debug("revlink %s does not exist", revlink)
199 208
@@ -213,7 +222,7 @@ def lookup_pkglist(pkgs, pkgdata_dir, reverse):
213 with open(pkgfile, 'r') as f: 222 with open(pkgfile, 'r') as f:
214 for line in f: 223 for line in f:
215 fields = line.rstrip().split(': ') 224 fields = line.rstrip().split(': ')
216 if fields[0] == 'PKG_%s' % pkg: 225 if fields[0] == 'PKG:%s' % pkg:
217 mappings[pkg].append(fields[1]) 226 mappings[pkg].append(fields[1])
218 break 227 break
219 return mappings 228 return mappings
@@ -287,7 +296,7 @@ def package_info(args):
287 extra = '' 296 extra = ''
288 for line in f: 297 for line in f:
289 for var in vars: 298 for var in vars:
290 m = re.match(var + '(?:_\S+)?:\s*(.+?)\s*$', line) 299 m = re.match(var + r'(?::\S+)?:\s*(.+?)\s*$', line)
291 if m: 300 if m:
292 vals[var] = m.group(1) 301 vals[var] = m.group(1)
293 pkg_version = vals['PKGV'] or '' 302 pkg_version = vals['PKGV'] or ''
@@ -431,7 +440,7 @@ def list_pkg_files(args):
431 for line in f: 440 for line in f:
432 if line.startswith('FILES_INFO:'): 441 if line.startswith('FILES_INFO:'):
433 found = True 442 found = True
434 val = line.split(':', 1)[1].strip() 443 val = line.split(': ', 1)[1].strip()
435 dictval = json.loads(val) 444 dictval = json.loads(val)
436 if long: 445 if long:
437 width = max(map(len, dictval), default=0) 446 width = max(map(len, dictval), default=0)
@@ -500,7 +509,7 @@ def find_path(args):
500 with open(os.path.join(root,fn)) as f: 509 with open(os.path.join(root,fn)) as f:
501 for line in f: 510 for line in f:
502 if line.startswith('FILES_INFO:'): 511 if line.startswith('FILES_INFO:'):
503 val = line.split(':', 1)[1].strip() 512 val = line.split(': ', 1)[1].strip()
504 dictval = json.loads(val) 513 dictval = json.loads(val)
505 for fullpth in dictval.keys(): 514 for fullpth in dictval.keys():
506 if fnmatch.fnmatchcase(fullpth, args.targetpath): 515 if fnmatch.fnmatchcase(fullpth, args.targetpath):
@@ -570,7 +579,7 @@ def main():
570 parser_read_value = subparsers.add_parser('read-value', 579 parser_read_value = subparsers.add_parser('read-value',
571 help='Read any pkgdata value for one or more packages', 580 help='Read any pkgdata value for one or more packages',
572 description='Reads the named value from the pkgdata files for the specified packages') 581 description='Reads the named value from the pkgdata files for the specified packages')
573 parser_read_value.add_argument('valuename', help='Name of the value to look up') 582 parser_read_value.add_argument('valuenames', help='Name of the value/s to look up (separated by commas, no spaces)')
574 parser_read_value.add_argument('pkg', nargs='*', help='Runtime package name to look up') 583 parser_read_value.add_argument('pkg', nargs='*', help='Runtime package name to look up')
575 parser_read_value.add_argument('-f', '--file', help='Read package names from the specified file (one per line, first field only)') 584 parser_read_value.add_argument('-f', '--file', help='Read package names from the specified file (one per line, first field only)')
576 parser_read_value.add_argument('-n', '--prefix-name', help='Prefix output with package name', action='store_true') 585 parser_read_value.add_argument('-n', '--prefix-name', help='Prefix output with package name', action='store_true')
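
With this change, read-value accepts several comma-separated variable names and prints the values space-separated, e.g. `oe-pkgdata-util read-value PKGSIZE,PACKAGE <pkg>`. A minimal sketch of the new loop (readvar and the runtime-reverse file layout are assumed from the surrounding script):

    def read_values(valuenames, readvar, revlink, mappedpkg):
        values = []
        for qvar in valuenames.split(','):
            if qvar == "PACKAGE":
                value = mappedpkg  # the renamed/mapped package name
            else:
                value = readvar(revlink, qvar, mappedpkg)
            if qvar == "PKGSIZE":
                # PKGSIZE is stored in bytes; round to the nearest KB
                value = "%d" % ((int(value) + 1024 // 2) // 1024)
            values.append(value)
        return ' '.join(values)
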
diff --git a/scripts/oe-publish-sdk b/scripts/oe-publish-sdk
index deb8ae1807..b8a652e47f 100755
--- a/scripts/oe-publish-sdk
+++ b/scripts/oe-publish-sdk
@@ -107,9 +107,9 @@ def publish(args):
107 107
108 # Setting up the git repo 108 # Setting up the git repo
109 if not is_remote: 109 if not is_remote:
110 cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; cp .git/hooks/post-update.sample .git/hooks/post-commit; echo "*.pyc\n*.pyo\npyshtables.py" > .gitignore; fi; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true' % (destination, destination) 110 cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; cp .git/hooks/post-update.sample .git/hooks/post-commit; echo "*.pyc\n*.pyo\npyshtables.py" > .gitignore; fi; git config gc.auto 0; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true' % (destination, destination)
111 else: 111 else:
112 cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; cp .git/hooks/post-update.sample .git/hooks/post-commit; echo '*.pyc' > .gitignore; echo '*.pyo' >> .gitignore; echo 'pyshtables.py' >> .gitignore; fi; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true'" % (host, destdir, destdir) 112 cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; cp .git/hooks/post-update.sample .git/hooks/post-commit; echo '*.pyc' > .gitignore; echo '*.pyo' >> .gitignore; echo 'pyshtables.py' >> .gitignore; fi; git config gc.auto 0; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true'" % (host, destdir, destdir)
113 ret = subprocess.call(cmd, shell=True) 113 ret = subprocess.call(cmd, shell=True)
114 if ret == 0: 114 if ret == 0:
115 logger.info('SDK published successfully') 115 logger.info('SDK published successfully')
diff --git a/scripts/oe-pylint b/scripts/oe-pylint
index 7cc1ccb010..5ad72838e9 100755
--- a/scripts/oe-pylint
+++ b/scripts/oe-pylint
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Run the pylint3 against our common python module spaces and print a report of potential issues 7# Run the pylint3 against our common python module spaces and print a report of potential issues
diff --git a/scripts/oe-setup-build b/scripts/oe-setup-build
new file mode 100755
index 0000000000..5364f2b481
--- /dev/null
+++ b/scripts/oe-setup-build
@@ -0,0 +1,122 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import argparse
9import json
10import os
11import subprocess
12
13def defaultlayers():
14 return os.path.abspath(os.path.join(os.path.dirname(__file__), '.oe-layers.json'))
15
16def makebuildpath(topdir, template):
17 return os.path.join(topdir, "build-{}".format(template))
18
19def discover_templates(layers_file):
20 if not os.path.exists(layers_file):
21 print("List of layers {} does not exist; were the layers set up using the setup-layers script?".format(layers_file))
22 return None
23
24 templates = []
25 layers_list = json.load(open(layers_file))["layers"]
26 for layer in layers_list:
27 template_dir = os.path.join(os.path.dirname(layers_file), layer, 'conf','templates')
28 if os.path.exists(template_dir):
29 for d in sorted(os.listdir(template_dir)):
30 templatepath = os.path.join(template_dir,d)
31 if not os.path.isfile(os.path.join(templatepath,'local.conf.sample')):
32 continue
33 layer_base = os.path.basename(layer)
34 templatename = "{}-{}".format(layer_base[5:] if layer_base.startswith("meta-") else layer_base, d)
35 buildpath = makebuildpath(os.getcwd(), templatename)
36 notespath = os.path.join(template_dir, d, 'conf-notes.txt')
37 try: notes = open(notespath).read()
38 except: notes = None
39 try: summary = open(os.path.join(template_dir, d, 'conf-summary.txt')).read()
40 except: summary = None
41 templates.append({"templatename":templatename,"templatepath":templatepath,"buildpath":buildpath,"notespath":notespath,"notes":notes,"summary":summary})
42
43 return templates
44
45def print_templates(templates, verbose):
46 print("Available build configurations:\n")
47
48 for i in range(len(templates)):
49 t = templates[i]
50 print("{}. {}".format(i+1, t["templatename"]))
51 print("{}".format(t["summary"].strip() if t["summary"] else "This configuration does not have a summary."))
52 if verbose:
53 print("Configuration template path:", t["templatepath"])
54 print("Build path:", t["buildpath"])
55 print("Usage notes:", t["notespath"] if t["notes"] else "This configuration does not have usage notes.")
56 print("")
57 if not verbose:
58 print("Re-run with 'list -v' to see additional information.")
59
60def list_templates(args):
61 templates = discover_templates(args.layerlist)
62 if not templates:
63 return
64
65 verbose = args.v
66 print_templates(templates, verbose)
67
68def find_template(template_name, templates):
69 print_templates(templates, False)
70 if not template_name:
71 n_s = input("Please choose a configuration by its number: ")
72 try: return templates[int(n_s) - 1]
73 except:
74 print("Invalid selection, please try again.")
75 return None
76 else:
77 for t in templates:
78 if t["templatename"] == template_name:
79 return t
80 print("Configuration {} is not one of {}, please try again.".format(tempalte_name, [t["templatename"] for t in templates]))
81 return None
82
83def setup_build_env(args):
84 templates = discover_templates(args.layerlist)
85 if not templates:
86 return
87
88 template = find_template(args.c, templates)
89 if not template:
90 return
91 builddir = args.b if args.b else template["buildpath"]
92 no_shell = args.no_shell
93 coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
94 cmd = "TEMPLATECONF={} . {} {}".format(template["templatepath"], os.path.join(coredir, 'oe-init-build-env'), builddir)
95 if not no_shell:
96 cmd = cmd + " && {}".format(os.environ['SHELL'])
97 print("Running:", cmd)
98 subprocess.run(cmd, shell=True, executable=os.environ['SHELL'])
99
100parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.")
101parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers()))
102
103subparsers = parser.add_subparsers()
104parser_list_templates = subparsers.add_parser('list', help='List available configurations')
105parser_list_templates.add_argument('-v', action='store_true',
106 help='Print detailed information and usage notes for each available build configuration.')
107parser_list_templates.set_defaults(func=list_templates)
108
109parser_setup_env = subparsers.add_parser('setup', help='Set up a build environment and open a shell session with it, ready to run builds.')
110parser_setup_env.add_argument('-c', metavar='configuration_name', help="Use a build configuration configuration_name to set up a build environment (run this script with 'list' to see what is available)")
111parser_setup_env.add_argument('-b', metavar='build_path', help="Set up a build directory in build_path (run this script with 'list -v' to see where it would be by default)")
112parser_setup_env.add_argument('--no-shell', action='store_true',
113 help='Create a build directory but do not start a shell session with the build environment from it.')
114parser_setup_env.set_defaults(func=setup_build_env)
115
116args = parser.parse_args()
117
118if 'func' in args:
119 args.func(args)
120else:
121 from argparse import Namespace
122 setup_build_env(Namespace(layerlist=args.layerlist, c=None, b=None, no_shell=False))
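
discover_templates() above expects the .oe-layers.json written by oe-setup-layers (see below). For orientation, a minimal illustrative file of that shape (the layer paths are examples only, relative to the json file):

    import json
    layers = {
        "version": "1.0",
        "layers": ["meta", "meta-poky"]  # illustrative layer paths
    }
    print(json.dumps(layers, sort_keys=True, indent=4))
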
diff --git a/scripts/oe-setup-builddir b/scripts/oe-setup-builddir
index 30eaa8efbe..dcb384c33a 100755
--- a/scripts/oe-setup-builddir
+++ b/scripts/oe-setup-builddir
@@ -7,12 +7,14 @@
7# SPDX-License-Identifier: GPL-2.0-or-later 7# SPDX-License-Identifier: GPL-2.0-or-later
8# 8#
9 9
10if [ -z "$BUILDDIR" ]; then 10die() {
11 echo >&2 "Error: The build directory (BUILDDIR) must be set!" 11 echo Error: "$@" >&2
12 exit 1 12 exit 1
13fi 13}
14
15[ -n "$BUILDDIR" ] || die "The build directory (BUILDDIR) must be set!"
14 16
15if [ "$1" = '--help' -o "$1" = '-h' ]; then 17if [ "$1" = '--help' ] || [ "$1" = '-h' ]; then
16 echo 'Usage: oe-setup-builddir' 18 echo 'Usage: oe-setup-builddir'
17 echo '' 19 echo ''
18 echo "OpenEmbedded setup-builddir - setup build directory $BUILDDIR" 20 echo "OpenEmbedded setup-builddir - setup build directory $BUILDDIR"
@@ -22,33 +24,22 @@ fi
22 24
23mkdir -p "$BUILDDIR/conf" 25mkdir -p "$BUILDDIR/conf"
24 26
25if [ ! -d "$BUILDDIR" ]; then 27[ -d "$BUILDDIR" ] || die "The build directory ($BUILDDIR) does not exist!"
26 echo >&2 "Error: The builddir ($BUILDDIR) does not exist!" 28[ -w "$BUILDDIR" ] ||
27 exit 1 29 die "Cannot write to $BUILDDIR, perhaps try sourcing with a writable path? i.e. . oe-init-build-env ~/my-build"
28fi
29
30if [ ! -w "$BUILDDIR" ]; then
31 echo >&2 "Error: Cannot write to $BUILDDIR, perhaps try sourcing with a writable path? i.e. . oe-init-build-env ~/my-build"
32 exit 1
33fi
34 30
35# Attempting removal of sticky,setuid bits from BUILDDIR, BUILDDIR/conf 31# Attempting removal of sticky,setuid bits from BUILDDIR, BUILDDIR/conf
36chmod -st "$BUILDDIR" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR" 32chmod -st "$BUILDDIR" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR"
37chmod -st "$BUILDDIR/conf" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR/conf" 33chmod -st "$BUILDDIR/conf" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR/conf"
38 34
39cd "$BUILDDIR" 35cd "$BUILDDIR" || die "Failed to change directory to $BUILDDIR!"
40 36
41if [ -f "$BUILDDIR/conf/templateconf.cfg" ]; then 37. "$OEROOT/.templateconf"
42 TEMPLATECONF=$(cat "$BUILDDIR/conf/templateconf.cfg")
43fi
44
45. $OEROOT/.templateconf
46 38
47if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then 39# Keep the original TEMPLATECONF before possibly prefixing it with $OEROOT below.
48 echo "$TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg" 40ORG_TEMPLATECONF=$TEMPLATECONF
49fi
50 41
51# 42#
52# $TEMPLATECONF can point to a directory for the template local.conf & bblayers.conf 43# $TEMPLATECONF can point to a directory for the template local.conf & bblayers.conf
53# 44#
54if [ -n "$TEMPLATECONF" ]; then 45if [ -n "$TEMPLATECONF" ]; then
@@ -57,73 +48,94 @@ if [ -n "$TEMPLATECONF" ]; then
57 if [ -d "$OEROOT/$TEMPLATECONF" ]; then 48 if [ -d "$OEROOT/$TEMPLATECONF" ]; then
58 TEMPLATECONF="$OEROOT/$TEMPLATECONF" 49 TEMPLATECONF="$OEROOT/$TEMPLATECONF"
59 fi 50 fi
60 if [ ! -d "$TEMPLATECONF" ]; then 51 [ -d "$TEMPLATECONF" ] ||
61 echo >&2 "Error: TEMPLATECONF value points to nonexistent directory '$TEMPLATECONF'" 52 die "TEMPLATECONF value points to nonexistent directory '$TEMPLATECONF'"
62 exit 1 53 fi
63 fi 54 templatesdir=$(python3 -c "import sys; print(sys.argv[1].strip('/').split('/')[-2])" "$TEMPLATECONF")
55 if [ "$templatesdir" != templates ] || [ ! -f "$TEMPLATECONF/../../layer.conf" ]; then
56 die "TEMPLATECONF value (which is $TEMPLATECONF) must point to meta-some-layer/conf/templates/template-name"
64 fi 57 fi
65 OECORELAYERCONF="$TEMPLATECONF/bblayers.conf.sample" 58 OECORELAYERCONF="$TEMPLATECONF/bblayers.conf.sample"
66 OECORELOCALCONF="$TEMPLATECONF/local.conf.sample" 59 OECORELOCALCONF="$TEMPLATECONF/local.conf.sample"
60 OECORESUMMARYCONF="$TEMPLATECONF/conf-summary.txt"
67 OECORENOTESCONF="$TEMPLATECONF/conf-notes.txt" 61 OECORENOTESCONF="$TEMPLATECONF/conf-notes.txt"
68fi 62fi
69 63
70unset SHOWYPDOC 64unset SHOWYPDOC
71if [ -z "$OECORELOCALCONF" ]; then 65if [ -z "$OECORELOCALCONF" ]; then
72 OECORELOCALCONF="$OEROOT/meta/conf/local.conf.sample" 66 OECORELOCALCONF="$OEROOT/meta/conf/templates/default/local.conf.sample"
73fi 67fi
74if [ ! -r "$BUILDDIR/conf/local.conf" ]; then 68if [ ! -r "$BUILDDIR/conf/local.conf" ]; then
75 cat <<EOM 69 cat <<EOM
76You had no conf/local.conf file. This configuration file has therefore been 70You had no conf/local.conf file. This configuration file has therefore been
77created for you with some default values. You may wish to edit it to, for 71created for you from $OECORELOCALCONF
78example, select a different MACHINE (target hardware). See conf/local.conf 72You may wish to edit it to, for example, select a different MACHINE (target
79for more information as common configuration options are commented. 73hardware).
80 74
81EOM 75EOM
82 cp -f $OECORELOCALCONF "$BUILDDIR/conf/local.conf" 76 cp -f "$OECORELOCALCONF" "$BUILDDIR/conf/local.conf"
83 SHOWYPDOC=yes 77 SHOWYPDOC=yes
84fi 78fi
85 79
86if [ -z "$OECORELAYERCONF" ]; then 80if [ -z "$OECORELAYERCONF" ]; then
87 OECORELAYERCONF="$OEROOT/meta/conf/bblayers.conf.sample" 81 OECORELAYERCONF="$OEROOT/meta/conf/templates/default/bblayers.conf.sample"
88fi 82fi
89if [ ! -r "$BUILDDIR/conf/bblayers.conf" ]; then 83if [ ! -r "$BUILDDIR/conf/bblayers.conf" ]; then
90 cat <<EOM 84 cat <<EOM
91You had no conf/bblayers.conf file. This configuration file has therefore been 85You had no conf/bblayers.conf file. This configuration file has therefore been
92created for you with some default values. To add additional metadata layers 86created for you from $OECORELAYERCONF
93into your configuration please add entries to conf/bblayers.conf. 87To add additional metadata layers into your configuration please add entries
88to conf/bblayers.conf.
94 89
95EOM 90EOM
96 91
97 # Put the abosolute path to the layers in bblayers.conf so we can run 92 # Put the absolute path to the layers in bblayers.conf so we can run
98 # bitbake without the init script after the first run 93 # bitbake without the init script after the first run.
99 # ##COREBASE## is deprecated as it's meaning was inconsistent, but continue 94 # ##COREBASE## is deprecated as its meaning was inconsistent, but continue
100 # to replace it for compatibility. 95 # to replace it for compatibility.
101 sed -e "s|##OEROOT##|$OEROOT|g" \ 96 sed -e "s|##OEROOT##|$OEROOT|g" \
102 -e "s|##COREBASE##|$OEROOT|g" \ 97 -e "s|##COREBASE##|$OEROOT|g" \
103 $OECORELAYERCONF > "$BUILDDIR/conf/bblayers.conf" 98 "$OECORELAYERCONF" > "$BUILDDIR/conf/bblayers.conf"
104 SHOWYPDOC=yes 99 SHOWYPDOC=yes
105fi 100fi
106 101
102if [ -z "$OECORESUMMARYCONF" ]; then
103 OECORESUMMARYCONF="$OEROOT/meta/conf/templates/default/conf-summary.txt"
104fi
105if [ ! -r "$BUILDDIR/conf/conf-summary.txt" ]; then
106 [ ! -r "$OECORESUMMARYCONF" ] || cp "$OECORESUMMARYCONF" "$BUILDDIR/conf/conf-summary.txt"
107fi
108
109if [ -z "$OECORENOTESCONF" ]; then
110 OECORENOTESCONF="$OEROOT/meta/conf/templates/default/conf-notes.txt"
111fi
112if [ ! -r "$BUILDDIR/conf/conf-notes.txt" ]; then
113 [ ! -r "$OECORENOTESCONF" ] || cp "$OECORENOTESCONF" "$BUILDDIR/conf/conf-notes.txt"
114fi
115
107# Prevent disturbing a new GIT clone in same console 116# Prevent disturbing a new GIT clone in same console
108unset OECORELOCALCONF 117unset OECORELOCALCONF
109unset OECORELAYERCONF 118unset OECORELAYERCONF
119unset OECORESUMMARYCONF
120unset OECORENOTESCONF
110 121
111# Ending the first-time run message. Show the YP Documentation banner. 122# Ending the first-time run message. Show the YP Documentation banner.
112if [ ! -z "$SHOWYPDOC" ]; then 123if [ -n "$SHOWYPDOC" ]; then
113 cat <<EOM 124 cat <<EOM
114The Yocto Project has extensive documentation about OE including a reference 125The Yocto Project has extensive documentation about OE including a reference
115manual which can be found at: 126manual which can be found at:
116 http://yoctoproject.org/documentation 127 https://docs.yoctoproject.org
117 128
118For more information about OpenEmbedded see their website: 129For more information about OpenEmbedded see the website:
119 http://www.openembedded.org/ 130 https://www.openembedded.org/
120 131
121EOM 132EOM
122# unset SHOWYPDOC 133# unset SHOWYPDOC
123fi 134fi
124 135
125if [ -z "$OECORENOTESCONF" ]; then 136[ ! -r "$BUILDDIR/conf/conf-summary.txt" ] || cat "$BUILDDIR/conf/conf-summary.txt"
126 OECORENOTESCONF="$OEROOT/meta/conf/conf-notes.txt" 137[ ! -r "$BUILDDIR/conf/conf-notes.txt" ] || cat "$BUILDDIR/conf/conf-notes.txt"
138
139if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then
140 echo "$ORG_TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg"
127fi 141fi
128[ ! -r "$OECORENOTESCONF" ] || cat $OECORENOTESCONF
129unset OECORENOTESCONF
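
The new TEMPLATECONF check above requires the value to point at meta-some-layer/conf/templates/template-name. A sketch of the same test in plain Python (the example path is illustrative):

    import os

    def valid_templateconf(path):
        # second-to-last path component must be 'templates', and a
        # layer.conf must exist two levels up (in the layer's conf/)
        templatesdir = path.strip('/').split('/')[-2]
        return templatesdir == 'templates' and \
            os.path.isfile(os.path.join(path, '..', '..', 'layer.conf'))

    # e.g. valid_templateconf('/srv/meta-some-layer/conf/templates/default')
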
diff --git a/scripts/oe-setup-layers b/scripts/oe-setup-layers
new file mode 100755
index 0000000000..6fbfefd656
--- /dev/null
+++ b/scripts/oe-setup-layers
@@ -0,0 +1,146 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8# This file was copied from poky(or oe-core)/scripts/oe-setup-layers by running
9#
10# bitbake-layers create-layers-setup destdir
11#
12# It is recommended that you do not modify this file directly, but rather re-run the above command to get the freshest upstream copy.
13#
14# This script is idempotent. Subsequent runs only change what is necessary to
15# ensure your layers match your configuration.
16
17import argparse
18import json
19import os
20import subprocess
21
22def _is_repo_git_repo(repodir):
23 try:
24 curr_toplevel = subprocess.check_output("git -C %s rev-parse --show-toplevel" % repodir, shell=True, stderr=subprocess.DEVNULL)
25 if curr_toplevel.strip().decode("utf-8") == repodir:
26 return True
27 except subprocess.CalledProcessError:
28 pass
29 return False
30
31def _is_repo_at_rev(repodir, rev):
32 try:
33 curr_rev = subprocess.check_output("git -C %s rev-parse HEAD" % repodir, shell=True, stderr=subprocess.DEVNULL)
34 if curr_rev.strip().decode("utf-8") == rev:
35 return True
36 except subprocess.CalledProcessError:
37 pass
38 return False
39
40def _is_repo_at_remote_uri(repodir, remote, uri):
41 try:
42 curr_uri = subprocess.check_output("git -C %s remote get-url %s" % (repodir, remote), shell=True, stderr=subprocess.DEVNULL)
43 if curr_uri.strip().decode("utf-8") == uri:
44 return True
45 except subprocess.CalledProcessError:
46 pass
47 return False
48
49def _contains_submodules(repodir):
50 return os.path.exists(os.path.join(repodir,".gitmodules"))
51
52def _write_layer_list(dest, repodirs):
53 layers = []
54 for r in repodirs:
55 for root, dirs, files in os.walk(r):
56 if os.path.basename(root) == 'conf' and 'layer.conf' in files:
57 layers.append(os.path.relpath(os.path.dirname(root), dest))
58 layers_f = os.path.join(dest, ".oe-layers.json")
59 print("Writing list of layers into {}".format(layers_f))
60 with open(layers_f, 'w') as f:
61 json.dump({"version":"1.0","layers":layers}, f, sort_keys=True, indent=4)
62
63def _do_checkout(args, json):
64 repos = json['sources']
65 repodirs = []
66 oesetupbuild = None
67 for r_name in repos:
68 r_data = repos[r_name]
69 repodir = os.path.abspath(os.path.join(args['destdir'], r_data['path']))
70 repodirs.append(repodir)
71
72 if 'contains_this_file' in r_data.keys():
73 force_arg = 'force_bootstraplayer_checkout'
74 if not args[force_arg]:
75 print('Note: not checking out source {repo}, use {repoflag} to override.'.format(repo=r_name, repoflag='--force-bootstraplayer-checkout'))
76 continue
77 r_remote = r_data['git-remote']
78 rev = r_remote['rev']
79 desc = r_remote['describe']
80 if not desc:
81 desc = rev[:10]
82 branch = r_remote['branch']
83 remotes = r_remote['remotes']
84
85 print('\nSetting up source {}, revision {}, branch {}'.format(r_name, desc, branch))
86 if not _is_repo_git_repo(repodir):
87 cmd = 'git init -q {}'.format(repodir)
88 print("Running '{}'".format(cmd))
89 subprocess.check_output(cmd, shell=True)
90
91 for remote in remotes:
92 if not _is_repo_at_remote_uri(repodir, remote, remotes[remote]['uri']):
93 cmd = "git remote remove {} > /dev/null 2>&1; git remote add {} {}".format(remote, remote, remotes[remote]['uri'])
94 print("Running '{}' in {}".format(cmd, repodir))
95 subprocess.check_output(cmd, shell=True, cwd=repodir)
96
97 cmd = "git fetch -q {} || true".format(remote)
98 print("Running '{}' in {}".format(cmd, repodir))
99 subprocess.check_output(cmd, shell=True, cwd=repodir)
100
101 if not _is_repo_at_rev(repodir, rev):
102 cmd = "git fetch -q --all || true"
103 print("Running '{}' in {}".format(cmd, repodir))
104 subprocess.check_output(cmd, shell=True, cwd=repodir)
105
106 cmd = 'git checkout -q {}'.format(rev)
107 print("Running '{}' in {}".format(cmd, repodir))
108 subprocess.check_output(cmd, shell=True, cwd=repodir)
109
110 if _contains_submodules(repodir):
111 print("Repo {} contains submodules, use 'git submodule update' to ensure they are up to date".format(repodir))
112 if os.path.exists(os.path.join(repodir, 'scripts/oe-setup-build')):
113 oesetupbuild = os.path.join(repodir, 'scripts/oe-setup-build')
114
115 _write_layer_list(args['destdir'], repodirs)
116
117 if oesetupbuild:
118 oesetupbuild_symlink = os.path.join(args['destdir'], 'setup-build')
119 if os.path.exists(oesetupbuild_symlink):
120 os.remove(oesetupbuild_symlink)
121 os.symlink(os.path.relpath(oesetupbuild,args['destdir']),oesetupbuild_symlink)
122 print("\nRun '{}' to list available build configuration templates and set up a build from one of them.".format(oesetupbuild_symlink))
123
124parser = argparse.ArgumentParser(description="A self contained python script that fetches all the needed layers and sets them to correct revisions using data in a json format from a separate file. The json data can be created from an active build directory with 'bitbake-layers create-layers-setup destdir' and there's a sample file and a schema in meta/files/")
125
126parser.add_argument('--force-bootstraplayer-checkout', action='store_true',
127 help='Force the checkout of the layer containing this file (by default it is presumed that as this script is in it, the layer is already in place).')
128
129try:
130 defaultdest = os.path.dirname(subprocess.check_output('git rev-parse --show-toplevel', universal_newlines=True, shell=True, cwd=os.path.dirname(__file__)))
131except subprocess.CalledProcessError as e:
132 defaultdest = os.path.abspath(".")
133
134parser.add_argument('--destdir', default=defaultdest, help='Where to check out the layers (default is {defaultdest}).'.format(defaultdest=defaultdest))
135parser.add_argument('--jsondata', default=__file__+".json", help='File containing the layer data in json format (default is {defaultjson}).'.format(defaultjson=__file__+".json"))
136
137args = parser.parse_args()
138
139with open(args.jsondata) as f:
140 json_f = json.load(f)
141
142supported_versions = ["1.0"]
143if json_f["version"] not in supported_versions:
144 raise Exception("File {} has version {}, which is not in supported versions: {}".format(args.jsondata, json_f["version"], supported_versions))
145
146_do_checkout(vars(args), json_f)
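
For reference, _do_checkout() above reads a json file of the following shape; the names, paths, and revision here are illustrative only, and the authoritative sample and schema live in meta/files/ as the argparse description notes:

    setup = {
        "version": "1.0",
        "sources": {
            "poky": {                      # example source name
                "path": "poky",
                "git-remote": {
                    "rev": "<full commit hash>",
                    "describe": "",
                    "branch": "master",
                    "remotes": {
                        "origin": {"uri": "https://git.yoctoproject.org/poky"}
                    }
                }
            }
        }
    }
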
diff --git a/scripts/oe-setup-vscode b/scripts/oe-setup-vscode
new file mode 100755
index 0000000000..b8642780d5
--- /dev/null
+++ b/scripts/oe-setup-vscode
@@ -0,0 +1,93 @@
1#!/bin/sh
2
3usage() {
4 echo "$0 <OEINIT> <BUILDDIR>"
5 echo " OEINIT: path to directory where the .vscode folder is"
6 echo " BUILDDIR: directory passed to the oe-init-setup-env script"
7}
8
9if [ "$#" -ne 2 ]; then
10 usage
11 exit 1
12fi
13
14OEINIT=$(readlink -f "$1")
15BUILDDIR=$(readlink -f "$2")
16VSCODEDIR=$OEINIT/.vscode
17
18if [ ! -d "$OEINIT" ] || [ ! -d "$BUILDDIR" ]; then
19 echo "$OEINIT and/or $BUILDDIR directories are not present."
20 exit 1
21fi
22
23VSCODE_SETTINGS=$VSCODEDIR/settings.json
24ws_builddir="$(echo "$BUILDDIR" | sed -e "s|$OEINIT|\${workspaceFolder}|g")"
25
26# If BUILDDIR is in scope of VSCode ensure VSCode does not try to index the build folder.
27# This would lead to a busy CPU and finally to an OOM exception.
28mkdir -p "$VSCODEDIR"
29cat <<EOMsettings > "$VSCODE_SETTINGS"
30{
31 "bitbake.pathToBitbakeFolder": "\${workspaceFolder}/bitbake",
32 "bitbake.pathToEnvScript": "\${workspaceFolder}/oe-init-build-env",
33 "bitbake.pathToBuildFolder": "$ws_builddir",
34 "bitbake.commandWrapper": "",
35 "bitbake.workingDirectory": "\${workspaceFolder}",
36 "files.exclude": {
37 "**/.git/**": true,
38 "**/_build/**": true,
39 "**/buildhistory/**": true,
40 "**/cache/**": true,
41 "**/downloads/**": true,
42 "**/node_modules/**": true,
43 "**/oe-logs/**": true,
44 "**/oe-workdir/**": true,
45 "**/sstate-cache/**": true,
46 "**/tmp*/**": true,
47 "**/workspace/attic/**": true,
48 "**/workspace/sources/**": true
49 },
50 "files.watcherExclude": {
51 "**/.git/**": true,
52 "**/_build/**": true,
53 "**/buildhistory/**": true,
54 "**/cache/**": true,
55 "**/downloads/**": true,
56 "**/node_modules/**": true,
57 "**/oe-logs/**": true,
58 "**/oe-workdir/**": true,
59 "**/sstate-cache/**": true,
60 "**/tmp*/**": true,
61 "**/workspace/attic/**": true,
62 "**/workspace/sources/**": true
63 },
64 "python.analysis.exclude": [
65 "**/_build/**",
66 "**/.git/**",
67 "**/buildhistory/**",
68 "**/cache/**",
69 "**/downloads/**",
70 "**/node_modules/**",
71 "**/oe-logs/**",
72 "**/oe-workdir/**",
73 "**/sstate-cache/**",
74 "**/tmp*/**",
75 "**/workspace/attic/**",
76 "**/workspace/sources/**"
77 ]
78}
79EOMsettings
80
81
82# Ask the user if the yocto-bitbake extension should be installed
83VSCODE_EXTENSIONS=$VSCODEDIR/extensions.json
84cat <<EOMextensions > "$VSCODE_EXTENSIONS"
85{
86 "recommendations": [
87 "yocto-project.yocto-bitbake"
88 ]
89}
90EOMextensions
91
92echo "You had no $VSCODEDIR configuration."
93echo "These configuration files have therefore been created for you."
diff --git a/scripts/oe-time-dd-test.sh b/scripts/oe-time-dd-test.sh
new file mode 100755
index 0000000000..81748b8c9e
--- /dev/null
+++ b/scripts/oe-time-dd-test.sh
@@ -0,0 +1,106 @@
1#!/bin/bash
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7# oe-time-dd-test records how much time it takes to
8# write <count> number of kilobytes to the filesystem.
9# It also records the number of processes that are in
10# running (R), uninterruptible sleep (D) and interruptible
11# sleep (S) state from the output of the "top" command.
12# The purpose of this script is to find which part of
13# the build system puts stress on the filesystem I/O and
14# log all the processes.
15usage() {
16 echo "$0 is used to detect i/o latency and runs commands to display host information."
17 echo "The following commands are run in order:"
18 echo "1) top -c -b -n1 -w 512"
19 echo "2) iostat -y -z -x 5 1"
20 echo "3) tail -30 tmp*/log/cooker/*/console-latest.log to gather cooker log."
21 echo " "
22 echo "Options:"
23 echo "-c | --count <amount> dd (transfer) <amount> KiB of data within specified timeout to detect latency."
24 echo " Must enable -t option."
25 echo "-t | --timeout <time> timeout in seconds for the <count> amount of data to be transferred."
26 echo "-l | --log-only run the commands without performing the data transfer."
27 echo "-h | --help show help"
28
29}
30
31run_cmds() {
32 echo "start: top output"
33 top -c -b -n1 -w 512
34 echo "end: top output"
35 echo "start: iostat"
36 iostat -y -z -x 5 1
37 echo "end: iostat"
38 echo "start: cooker log"
39 tail -30 tmp*/log/cooker/*/console-latest.log
40 echo "end: cooker log"
41}
42
43if [ $# -lt 1 ]; then
44 usage
45 exit 1
46fi
47
48re_c='^[0-9]+$'
49#re_t='^[0-9]+([.][0-9]+)?$'
50
51while [[ $# -gt 0 ]]; do
52 key="$1"
53
54 case $key in
55 -c|--count)
56 COUNT=$2
57 shift
58 shift
59 if ! [[ $COUNT =~ $re_c ]] || [[ $COUNT -le 0 ]] ; then
60 usage
61 exit 1
62 fi
63 ;;
64 -t|--timeout)
65 TIMEOUT=$2
66 shift
67 shift
68 if ! [[ $TIMEOUT =~ $re_c ]] || [[ $TIMEOUT -le 0 ]] ; then
69 usage
70 exit 1
71 fi
72 ;;
73 -l|--log-only)
74 LOG_ONLY="true"
75 shift
76 # no value to shift for this flag
77 ;;
78 -h|--help)
79 usage
80 exit 0
81 ;;
82 *)
83 usage
84 exit 1
85 ;;
86 esac
87done
88
89
90if [ "$LOG_ONLY" = "true" ] ; then
91 uptime
92 run_cmds
93 exit
94fi
95
96if [ -z ${TIMEOUT+x} ] || [ -z ${COUNT+x} ] ; then
97 usage
98 exit 1
99fi
100
101uptime
102echo "Timeout used: ${TIMEOUT}"
103timeout ${TIMEOUT} dd if=/dev/zero of=oe-time-dd-test.dat bs=1024 count=${COUNT} conv=fsync
104if [ $? -ne 0 ]; then
105 run_cmds
106fi
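
For example, an illustrative run that writes 1024 KiB and, if the transfer does not finish within 2 seconds, dumps the top/iostat/cooker-log diagnostics:

    oe-time-dd-test.sh -c 1024 -t 2
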
diff --git a/scripts/oe-trim-schemas b/scripts/oe-trim-schemas
index bf77c8cf64..e3b26e273e 100755
--- a/scripts/oe-trim-schemas
+++ b/scripts/oe-trim-schemas
@@ -1,5 +1,7 @@
1#! /usr/bin/env python3 1#! /usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/oepydevshell-internal.py b/scripts/oepydevshell-internal.py
index 96c078ef3d..3bf7df1114 100755
--- a/scripts/oepydevshell-internal.py
+++ b/scripts/oepydevshell-internal.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
@@ -43,7 +45,7 @@ nonblockingfd(pty)
43nonblockingfd(sys.stdin) 45nonblockingfd(sys.stdin)
44 46
45 47
46histfile = os.path.expanduser("~/.oedevpyshell-history") 48histfile = os.path.expanduser("~/.oepydevshell-history")
47readline.parse_and_bind("tab: complete") 49readline.parse_and_bind("tab: complete")
48try: 50try:
49 readline.read_history_file(histfile) 51 readline.read_history_file(histfile)
diff --git a/scripts/opkg-query-helper.py b/scripts/opkg-query-helper.py
index bc3ab43823..084d9ef684 100755
--- a/scripts/opkg-query-helper.py
+++ b/scripts/opkg-query-helper.py
@@ -29,7 +29,7 @@ for arg in sys.argv[1:]:
29 args.append(arg) 29 args.append(arg)
30 30
31# Regex for removing version specs after dependency items 31# Regex for removing version specs after dependency items
32verregex = re.compile(' \([=<>]* [^ )]*\)') 32verregex = re.compile(r' \([=<>]* [^ )]*\)')
33 33
34pkg = "" 34pkg = ""
35ver = "" 35ver = ""
diff --git a/scripts/patchtest b/scripts/patchtest
new file mode 100755
index 0000000000..0be7062dc2
--- /dev/null
+++ b/scripts/patchtest
@@ -0,0 +1,232 @@
1#!/usr/bin/env python3
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# patchtest: execute all unittest test cases discovered for a single patch
6#
7# Copyright (C) 2016 Intel Corporation
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import sys
13import os
14import unittest
15import logging
16import traceback
17import json
18
19# Include current path so test cases can see it
20sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
21
22# Include patchtest library
23sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest'))
24
25from data import PatchTestInput
26from repo import PatchTestRepo
27
28import utils
29logger = utils.logger_create('patchtest')
30info = logger.info
31error = logger.error
32
33import repo
34
35def getResult(patch, mergepatch, logfile=None):
36
37 class PatchTestResult(unittest.TextTestResult):
38 """ Patchtest TextTestResult """
39 shouldStop = True
40 longMessage = False
41
42 success = 'PASS'
43 fail = 'FAIL'
44 skip = 'SKIP'
45
46 def startTestRun(self):
47 # let's create the repo already, it can be used later on
48 repoargs = {
49 'repodir': PatchTestInput.repodir,
50 'commit' : PatchTestInput.basecommit,
51 'branch' : PatchTestInput.basebranch,
52 'patch' : patch,
53 }
54
55 self.repo_error = False
56 self.test_error = False
57 self.test_failure = False
58
59 try:
60 self.repo = PatchTestInput.repo = PatchTestRepo(**repoargs)
61 except:
62 logger.error(traceback.print_exc())
63 self.repo_error = True
64 self.stop()
65 return
66
67 if mergepatch:
68 self.repo.merge()
69
70 def addError(self, test, err):
71 self.test_error = True
72 (ty, va, trace) = err
73 logger.error(traceback.print_exc())
74
75 def addFailure(self, test, err):
76 test_description = test.id().split('.')[-1].replace('_', ' ').replace("cve", "CVE").replace("signed off by",
77 "Signed-off-by").replace("upstream status",
78 "Upstream-Status").replace("non auh",
79 "non-AUH").replace("presence format", "presence")
80 self.test_failure = True
81 fail_str = '{}: {}: {} ({})'.format(self.fail,
82 test_description, json.loads(str(err[1]))["issue"],
83 test.id())
84 print(fail_str)
85 if logfile:
86 with open(logfile, "a") as f:
87 f.write(fail_str + "\n")
88
89 def addSuccess(self, test):
90 test_description = test.id().split('.')[-1].replace('_', ' ').replace("cve", "CVE").replace("signed off by",
91 "Signed-off-by").replace("upstream status",
92 "Upstream-Status").replace("non auh",
93 "non-AUH").replace("presence format", "presence")
94 success_str = '{}: {} ({})'.format(self.success,
95 test_description, test.id())
96 print(success_str)
97 if logfile:
98 with open(logfile, "a") as f:
99 f.write(success_str + "\n")
100
101 def addSkip(self, test, reason):
102 test_description = test.id().split('.')[-1].replace('_', ' ').replace("cve", "CVE").replace("signed off by",
103 "Signed-off-by").replace("upstream status",
104 "Upstream-Status").replace("non auh",
105 "non-AUH").replace("presence format", "presence")
106 skip_str = '{}: {}: {} ({})'.format(self.skip,
107 test_description, json.loads(str(reason))["issue"],
108 test.id())
109 print(skip_str)
110 if logfile:
111 with open(logfile, "a") as f:
112 f.write(skip_str + "\n")
113
114 def stopTestRun(self):
115
116 # in case there was an error on repo object creation, just return
117 if self.repo_error:
118 return
119
120 self.repo.clean()
121
122 return PatchTestResult
123
124def _runner(resultklass, prefix=None):
125 # load test with the corresponding prefix
126 loader = unittest.TestLoader()
127 if prefix:
128 loader.testMethodPrefix = prefix
129
130 # create the suite with discovered tests and the corresponding runner
131 suite = loader.discover(start_dir=PatchTestInput.testdir, pattern=PatchTestInput.pattern, top_level_dir=PatchTestInput.topdir)
132 ntc = suite.countTestCases()
133
134 # if there are no test cases, just quit
135 if not ntc:
136 return 2
137 runner = unittest.TextTestRunner(resultclass=resultklass, verbosity=0)
138
139 try:
140 result = runner.run(suite)
141 except:
142 logger.error(traceback.print_exc())
143 logger.error('patchtest: something went wrong')
144 return 1
145 if result.test_failure or result.test_error:
146 return 1
147
148 return 0
149
150def run(patch, logfile=None):
151 """ Load, setup and run pre and post-merge tests """
152 # Get the result class and install the control-c handler
153 unittest.installHandler()
154
155 # run pre-merge tests, meaning those methods with 'pretest' as prefix
156 premerge_resultklass = getResult(patch, False, logfile)
157 premerge_result = _runner(premerge_resultklass, 'pretest')
158
159 # run post-merge tests, meaning those methods with 'test' as prefix
160 postmerge_resultklass = getResult(patch, True, logfile)
161 postmerge_result = _runner(postmerge_resultklass, 'test')
162
163 print('----------------------------------------------------------------------\n')
164 if premerge_result == 2 and postmerge_result == 2:
165 logger.error('patchtest: No test cases found - did you specify the correct suite directory?')
166 if premerge_result == 1 or postmerge_result == 1:
167 logger.error('WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance')
168 else:
169 logger.info('OK: patchtest: All patchtests passed')
170 print('----------------------------------------------------------------------\n')
171 return premerge_result or postmerge_result
172
173def main():
174 tmp_patch = False
175 patch_path = PatchTestInput.patch_path
176 log_results = PatchTestInput.log_results
177 log_path = None
178 patch_list = None
179
180 git_status = os.popen("(cd %s && git status)" % PatchTestInput.repodir).read()
181 status_matches = ["Changes not staged for commit", "Changes to be committed"]
182 if any([match in git_status for match in status_matches]):
183 logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest")
184 return 1
185
186 if os.path.isdir(patch_path):
187 patch_list = [os.path.join(patch_path, filename) for filename in sorted(os.listdir(patch_path))]
188 else:
189 patch_list = [patch_path]
190
191 for patch in patch_list:
192 if os.path.getsize(patch) == 0:
193 logger.error('patchtest: patch is empty')
194 return 1
195
196 logger.info('Testing patch %s' % patch)
197
198 if log_results:
199 log_path = patch + ".testresult"
200 with open(log_path, "a") as f:
201 f.write("Patchtest results for patch '%s':\n\n" % patch)
202
203 try:
204 if log_path:
205 run(patch, log_path)
206 else:
207 run(patch)
208 finally:
209 if tmp_patch:
210 os.remove(patch)
211
212if __name__ == '__main__':
213 ret = 1
214
215 # Parse the command line arguments and store it on the PatchTestInput namespace
216 PatchTestInput.set_namespace()
217
218 # set debugging level
219 if PatchTestInput.debug:
220 logger.setLevel(logging.DEBUG)
221
222 # if topdir not defined, default it to testdir
223 if not PatchTestInput.topdir:
224 PatchTestInput.topdir = PatchTestInput.testdir
225
226 try:
227 ret = main()
228 except Exception:
229 import traceback
230 traceback.print_exc(5)
231
232 sys.exit(ret)
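
The pre/post-merge split in _runner() relies on unittest's method-prefix filter. A minimal sketch of that mechanism (the test directory is illustrative):

    import unittest

    def make_suite(testdir, prefix):
        loader = unittest.TestLoader()
        loader.testMethodPrefix = prefix  # 'pretest' selects pre-merge cases
        return loader.discover(start_dir=testdir)

    # e.g. make_suite('/path/to/suite', 'pretest').countTestCases()
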
diff --git a/scripts/patchtest-get-branch b/scripts/patchtest-get-branch
new file mode 100755
index 0000000000..c6e242f8b6
--- /dev/null
+++ b/scripts/patchtest-get-branch
@@ -0,0 +1,81 @@
1#!/usr/bin/env python3
2
3# Get target branch from the corresponding mbox
4#
5# NOTE: this script was based on patches coming to the openembedded-core
6# where target branch is defined inside brackets as subject prefix
7# i.e. [master], [rocko], etc.
8#
9# Copyright (C) 2016 Intel Corporation
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13
14import mailbox
15import argparse
16import re
17import git
18
19re_prefix = re.compile(r"(\[.*\])", re.DOTALL)
20
21def get_branch(filepath_repo, filepath_mbox, default_branch):
22 branch = None
23
24 # get all remotes branches
25 gitbranches = git.Git(filepath_repo).branch('-a').splitlines()
26
27 # from gitbranches, just get the names
28 branches = [b.split('/')[-1] for b in gitbranches]
29
30 subject = ' '.join(mailbox.mbox(filepath_mbox)[0]['subject'].splitlines())
31
32 # we expect that patches will have somewhere between one and three
33 # consecutive sets of square brackets with tokens inside, e.g.:
34 # 1. [PATCH]
35 # 2. [OE-core][PATCH]
36 # 3. [OE-core][kirkstone][PATCH]
37 # Some of them may also be part of a series, in which case the PATCH
38 # token will be formatted like:
39 # [PATCH 1/4]
40 # or they will be revisions to previous patches, where it will be:
41 # [PATCH v2]
42 # Or they may contain both:
43 # [PATCH v2 3/4]
44 # In any case, we want mprefix to contain all of these tokens so
45 # that we can search for branch names within them.
46 mprefix = re.findall(r'\[.*?\]', subject)
47 found_branch = None
48 if mprefix:
49 # Iterate over the tokens and compare against the branch list to
50 # figure out which one the patch is targeting
51 for token in mprefix:
52 stripped = token.lower().strip('[]')
53 if default_branch in stripped:
54 found_branch = default_branch
55 break
56 else:
57 for branch in branches:
58 # ignore branches named "core"
59 if branch != "core" and stripped.rfind(branch) != -1:
60 found_branch = token.split(' ')[0].strip('[]')
61 break
62
63 # if there's no mprefix content or no known branches were found in
64 # the tokens, assume the target is master
65 if found_branch is None:
66 found_branch = "master"
67
68 return (subject, found_branch)
69
70if __name__ == '__main__':
71
72 parser = argparse.ArgumentParser()
73 parser.add_argument('repo', metavar='REPO', help='Main repository')
74 parser.add_argument('mbox', metavar='MBOX', help='mbox filename')
75 parser.add_argument('--default-branch', metavar='DEFAULT_BRANCH', default='master', help='Use this branch if no one is found')
76 parser.add_argument('--separator', '-s', metavar='SEPARATOR', default=' ', help='Char separator for output data')
77 args = parser.parse_args()
78
79 subject, branch = get_branch(args.repo, args.mbox, args.default_branch)
80 print("branch: %s" % branch)
81
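
A minimal sketch of the bracket-token scan described in the comments above, run against a made-up subject and branch list:

    import re

    subject = "[OE-core][kirkstone][PATCH v2 3/4] gcc: fix build"  # made up
    branches = ["master", "kirkstone", "dunfell"]
    found = None
    for token in re.findall(r'\[.*?\]', subject):
        stripped = token.lower().strip('[]')
        for branch in branches:
            # ignore branches named "core", as the script does
            if branch != "core" and stripped.rfind(branch) != -1:
                found = token.split(' ')[0].strip('[]')
                break
        if found:
            break
    print(found or "master")  # -> kirkstone
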
diff --git a/scripts/patchtest-get-series b/scripts/patchtest-get-series
new file mode 100755
index 0000000000..908442089f
--- /dev/null
+++ b/scripts/patchtest-get-series
@@ -0,0 +1,115 @@
1#!/bin/bash -e
2#
3# patchtest-get-series: Download latest patch series from Patchwork
4#
5# Copyright (C) 2023 BayLibre Inc.
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10# the interval into the past which we want to check for new series, in minutes
11INTERVAL_MINUTES=30
12
13# Maximum number of series to retrieve. The Patchwork API can support up to 250
14# at once
15SERIES_LIMIT=250
16
17# Location to save patches
18DOWNLOAD_PATH="."
19
20# Name of the file to use/check as a log of previously-tested series IDs
21SERIES_TEST_LOG=".series_test.log"
22
23# Patchwork project to pull series patches from
24PROJECT="oe-core"
25
26# The Patchwork server to pull from
27SERVER="https://patchwork.yoctoproject.org/api/1.2/"
28
29help()
30{
31 echo "Usage: get-latest-series [ -i | --interval MINUTES ]
32 [ -d | --directory DIRECTORY ]
33 [ -l | --limit COUNT ]
34 [ -h | --help ]
35 [ -t | --tested-series LOGFILE]
36 [ -p | --project PROJECT ]
37 [ -s | --server SERVER ]"
38 exit 2
39}
40
41while [ "$1" != "" ]; do
42 case $1 in
43 -i|--interval)
44 INTERVAL_MINUTES=$2
45 shift 2
46 ;;
47 -l|--limit)
48 SERIES_LIMIT=$2
49 shift 2
50 ;;
51 -d|--directory)
52 DOWNLOAD_PATH=$2
53 shift 2
54 ;;
55 -p|--project)
56 PROJECT=$2
57 shift 2
58 ;;
59 -s|--server)
60 SERVER=$2
61 shift 2
62 ;;
63 -t|--tested-series)
64 SERIES_TEST_LOG=$2
65 shift 2
66 ;;
67 -h|--help)
68 help
69 ;;
70 *)
71 echo "Unknown option $1"
72 help
73 ;;
74 esac
75done
76
77# The time this script is running at
78START_TIME=$(date --date "now" +"%Y-%m-%dT%H:%M:%S")
79
80# the corresponding timestamp we want to check against for new patch series
81SERIES_CHECK_LIMIT=$(date --date "now - ${INTERVAL_MINUTES} minutes" +"%Y-%m-%dT%H:%M:%S")
82
83echo "Start time is $START_TIME"
84echo "Series check limit is $SERIES_CHECK_LIMIT"
85
86# Create DOWNLOAD_PATH if it doesn't exist
87if [ ! -d "$DOWNLOAD_PATH" ]; then
88 mkdir "${DOWNLOAD_PATH}"
89fi
90
91# Create SERIES_TEST_LOG if it doesn't exist
92if [ ! -f "$SERIES_TEST_LOG" ]; then
93 touch "${SERIES_TEST_LOG}"
94fi
95
96# Retrieve a list of series IDs from the 'git-pw series list' output. The API
97# supports a maximum of 250 results, so make sure we allow that when required
98SERIES_LIST=$(git-pw --project "${PROJECT}" --server "${SERVER}" series list --since "${SERIES_CHECK_LIMIT}" --limit "${SERIES_LIMIT}" | awk '{print $2}' | xargs | sed -e 's/[^0-9 ]//g')
99
100if [ -z "$SERIES_LIST" ]; then
101 echo "No new series for project ${PROJECT} since ${SERIES_CHECK_LIMIT}"
102 exit 0
103fi
104
105# Check each series ID
106for SERIES in $SERIES_LIST; do
107 # Download the series only if it's not found in the SERIES_TEST_LOG
108 if ! grep -w --quiet "${SERIES}" "${SERIES_TEST_LOG}"; then
109 echo "Downloading $SERIES..."
110 git-pw series download --separate "${SERIES}" "${DOWNLOAD_PATH}"
111 echo "${SERIES}" >> "${SERIES_TEST_LOG}"
112 else
113 echo "Already tested ${SERIES}. Skipping..."
114 fi
115done
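
An illustrative invocation that widens the window to an hour and stores mboxes in a local directory (the values are examples only):

    patchtest-get-series --interval 60 --limit 10 --directory ./mboxes
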
diff --git a/scripts/patchtest-send-results b/scripts/patchtest-send-results
new file mode 100755
index 0000000000..8a3dadbd11
--- /dev/null
+++ b/scripts/patchtest-send-results
@@ -0,0 +1,110 @@
1#!/usr/bin/env python3
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# patchtest: execute all unittest test cases discovered for a single patch
6# Note that this script is currently under development and has been
7# hard-coded with default values for testing purposes. This script
8# should not be used without changing the default recipient, at minimum.
9#
10# Copyright (C) 2023 BayLibre Inc.
11#
12# SPDX-License-Identifier: GPL-2.0-only
13#
14
15import argparse
16import boto3
17import configparser
18import mailbox
19import os
20import re
21import sys
22
23greeting = """Thank you for your submission. Patchtest identified one
24or more issues with the patch. Please see the log below for
25more information:\n\n---\n"""
26
27suggestions = """\n---\n\nPlease address the issues identified and
28submit a new revision of the patch, or alternatively, reply to this
29email with an explanation of why the patch should be accepted. If you
30believe these results are due to an error in patchtest, please submit a
31bug at https://bugzilla.yoctoproject.org/ (use the 'Patchtest' category
32under 'Yocto Project Subprojects'). For more information on specific
33failures, see: https://wiki.yoctoproject.org/wiki/Patchtest. Thank
34you!"""
35
36def has_a_failed_test(raw_results):
37 return any(raw_result.split(':')[0] == "FAIL" for raw_result in raw_results.splitlines())
38
39parser = argparse.ArgumentParser(description="Send patchtest results to a submitter for a given patch")
40parser.add_argument("-p", "--patch", dest="patch", required=True, help="The patch file to summarize")
41parser.add_argument("-d", "--debug", dest="debug", required=False, action='store_true', help="Print raw email headers and content, but don't actually send it")
42args = parser.parse_args()
43
44if not os.path.exists(args.patch):
45 print(f"Patch '{args.patch}' not found - did you provide the right path?")
46 sys.exit(1)
47elif not os.path.exists(args.patch + ".testresult"):
48 print(f"Found patch '{args.patch}' but '{args.patch}.testresult' was not present. Have you run patchtest on the patch?")
49 sys.exit(1)
50
51result_file = args.patch + ".testresult"
52testresult = None
53
54with open(result_file, "r") as f:
55 testresult = f.read()
56
57# we know these patch files will only contain a single patch, so only
58# worry about the first element for getting the subject
59mbox = mailbox.mbox(args.patch)
60mbox_subject = mbox[0]['subject']
61subject_line = f"Patchtest results for {mbox_subject}"
62
63# extract the submitter email address and use it as the reply address
64# for the results
65reply_address = mbox[0]['from']
66
67# extract the message ID and use that as the in-reply-to address
68# TODO: This will need to change again when patchtest can handle a whole
69# series at once
70in_reply_to = mbox[0]['Message-ID']
71
72# the address the results email is sent from
73from_address = "patchtest@automation.yoctoproject.org"
74
75# mailing list to CC
76cc_address = "openembedded-core@lists.openembedded.org"
77
78if has_a_failed_test(testresult):
79 reply_contents = None
80 if len(max(open(result_file, 'r'), key=len)) > 220:
81 warning = "Tests failed for the patch, but the results log could not be processed due to excessive result line length."
82 reply_contents = greeting + warning + suggestions
83 else:
84 reply_contents = greeting + testresult + suggestions
85
86 ses_client = boto3.client('ses', region_name='us-west-2')
87
88 # Construct the headers for the email. We only want to reply
89 # directly to the tested patch, so make In-Reply-To and References
90 # the same value.
91 raw_data = 'From: ' + from_address + '\nTo: ' + reply_address + \
92 '\nCC: ' + cc_address + '\nSubject:' + subject_line + \
93 '\nIn-Reply-To:' + in_reply_to + \
94 '\nReferences:' + in_reply_to + \
95 '\nMIME-Version: 1.0' + \
96 '\nContent-type: Multipart/Mixed;boundary="NextPart"\n\n--NextPart\nContent-Type: text/plain\n\n' + \
97 reply_contents + '\n\n--NextPart'
98
99 if args.debug:
100 print(f"RawMessage: \n\n{raw_data}")
101 else:
102 response = ses_client.send_raw_email(
103 Source="patchtest@automation.yoctoproject.org",
104 RawMessage={
105 "Data": raw_data,
106 },
107 )
108
109else:
110 print(f"No failures identified for {args.patch}.")
diff --git a/scripts/patchtest-setup-sharedir b/scripts/patchtest-setup-sharedir
new file mode 100755
index 0000000000..277677e527
--- /dev/null
+++ b/scripts/patchtest-setup-sharedir
@@ -0,0 +1,83 @@
1#!/bin/bash -e
2#
3# patchtest-setup-sharedir: Setup a directory for storing mboxes and
4# repositories to be shared with the guest machine, including updates to
5# the repos if the directory already exists
6#
7# Copyright (C) 2023 BayLibre Inc.
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12# poky repository
13POKY_REPO="https://git.yoctoproject.org/poky"
14
15# patchtest repository
16PATCHTEST_REPO="https://git.yoctoproject.org/patchtest"
17
18# the name of the directory
19SHAREDIR="patchtest_share"
20
21help()
22{
23 echo "Usage: patchtest-setup-sharedir [ -d | --directory SHAREDIR ]
24 [ -p | --patchtest PATCHTEST_REPO ]
25 [ -y | --poky POKY_REPO ]"
26 exit 2
27}
28
29while [ "$1" != "" ]; do
30 case $1 in
31 -d|--directory)
32 SHAREDIR=$2
33 shift 2
34 ;;
35 -p|--patchtest)
36 PATCHTEST_REPO=$2
37 shift 2
38 ;;
39 -y|--poky)
40 POKY_REPO=$2
41 shift 2
42 ;;
43 -h|--help)
44 help
45 ;;
46 *)
47 echo "Unknown option $1"
48 help
49 ;;
50 esac
51done
52
53# define MBOX_DIR where the patch series will be stored by
54# get-latest-series
55MBOX_DIR="${SHAREDIR}/mboxes"
56
57# Create SHAREDIR if it doesn't exist
58if [ ! -d "$SHAREDIR" ]; then
59 mkdir -p "${SHAREDIR}"
60 echo "Created ${SHAREDIR}"
61fi
62
63# Create the mboxes directory if it doesn't exist
64if [ ! -d "$MBOX_DIR" ]; then
65 mkdir -p "${MBOX_DIR}"
66 echo "Created ${MBOX_DIR}"
67fi
68
69# clone poky if it's not already present; otherwise, update it
70BASENAME=$(basename "${POKY_REPO}")
71if [ ! -d "${SHAREDIR}/${BASENAME}" ]; then
72 git clone "${POKY_REPO}" "${SHAREDIR}/${BASENAME}"
73else
74 (cd "${SHAREDIR}/${BASENAME}" && git pull)
75fi
76
77# clone patchtest if it's not already present; otherwise, update it
78BASENAME=$(basename "${PATCHTEST_REPO}")
79if [ ! -d "${SHAREDIR}/${BASENAME}" ]; then
80 git clone "${PATCHTEST_REPO}" "${SHAREDIR}/${BASENAME}"
81else
82 (cd "${SHAREDIR}/${BASENAME}" && git pull)
83fi
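
An illustrative invocation using the default repositories (the directory name is an example):

    patchtest-setup-sharedir --directory ~/patchtest_share
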
diff --git a/scripts/patchtest.README b/scripts/patchtest.README
new file mode 100644
index 0000000000..76b5fcdb6d
--- /dev/null
+++ b/scripts/patchtest.README
@@ -0,0 +1,153 @@
1# Patchtest
2
3## Introduction
4
5Patchtest is a test framework for community patches based on the standard
6unittest Python module. As input, it needs three elements to work properly:
7a patch in mbox format (either created with `git format-patch` or fetched
8from 'patchwork'), a test suite and a target repository.
9
10The first test suite intended to be used with patchtest is found in the
11openembedded-core repository [1] targeted for patches that get into the
12openembedded-core mailing list [2]. This suite is also intended as a
13baseline for development of similar suites for other layers as needed.
14
15Patchtest can run on either a host or a guest machine, depending on the
16environment the execution needs to happen in. If you plan to test your own patches
17(a good practice before they are sent to the mailing list), the easiest way is
18to install and execute on your local host; on the other hand, if automatic
19testing is intended, the guest method is strongly recommended. The guest
20method requires the use of the patchtest layer, in addition to the tools
21available in oe-core: https://git.yoctoproject.org/patchtest/
22
23## Installation
24
25As a tool for use with the Yocto Project, the [quick start guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html)
26contains the necessary prerequisites for a basic project. In addition,
27patchtest relies on the following Python modules:
28
29- boto3 (for sending automated results emails only)
30- git-pw>=2.5.0
31- jinja2
32- pylint
33- pyparsing>=3.0.9
34- unidiff
35
36These can be installed by running `pip install -r
37meta/lib/patchtest/requirements.txt`. Note that git-pw is not
38automatically added to the user's PATH; by default, it is installed at
39~/.local/bin/git-pw.
40
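If `git-pw` is not found after installation, you can add `~/.local/bin` to
your PATH (a standard shell step; adjust the path if your pip install
location differs):

    export PATH="$HOME/.local/bin:$PATH"
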
41For git-pw (and therefore scripts such as patchtest-get-series) to work, you need
42to provide a Patchwork instance in your user's .gitconfig, like so (the project
43can be specified using the --project argument):
44
45 git config --global pw.server "https://patchwork.yoctoproject.org/api/1.2/"
46
47To work with patchtest, you should have the following repositories cloned:
48
491. https://git.openembedded.org/openembedded-core/ (or https://git.yoctoproject.org/poky/)
502. https://git.openembedded.org/bitbake/ (if not using poky)
513. https://git.yoctoproject.org/patchtest (if using guest mode)
52
53## Usage
54
55### Obtaining Patches
56
57Patch files can be obtained directly from cloned repositories using `git
58format-patch -N` (where N is the number of patches starting from HEAD to
59generate). git-pw can also be used with filters for users, patch/series IDs,
60and timeboxes if specific patches are desired. For more information, see the
61git-pw [documentation](https://patchwork.readthedocs.io/projects/git-pw/en/latest/).
62
63Alternatively, `scripts/patchtest-get-series` can be used to pull mbox files from
64the Patchwork instance configured previously in .gitconfig. It uses a log file
65called ".series_test.log" to store and compare series IDs so that the same
66versions of a patch are not tested multiple times unintentionally. By default,
67it will pull up to five patch series from the last 30 minutes using oe-core as
68the target project, but these parameters can be configured using the `--limit`,
69`--interval`, and `--project` arguments respectively. For more information, run
70`patchtest-get-series -h`.
71
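For example, the following invocation would pull up to ten series from the
last hour for the oe-core project (a sketch based on the flags described
above, assuming `--interval` is given in minutes as the 30-minute default
suggests):

    patchtest-get-series --limit 10 --interval 60 --project oe-core
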
72### Host Mode
73
74To run patchtest on the host, do the following:
75
761. In openembedded-core/poky, do `source oe-init-build-env`
772. Generate patch files from the target repository by doing `git format-patch -N`,
78 where N is the number of patches starting at HEAD, or by using git-pw
79 or patchtest-get-series
803. Run patchtest on a patch file by doing the following:
81
82 patchtest --patch /path/to/patch/file
83
84 or, if you have stored the patch files in a directory, do:
85
86 patchtest --directory /path/to/patch/directory
87
88 For example, to test `master-gcc-Fix--fstack-protector-issue-on-aarch64.patch` against the oe-core test suite:
89
90 patchtest --patch master-gcc-Fix--fstack-protector-issue-on-aarch64.patch
91
92 If you want to use a different test suite or target repository, you can use the --testdir and --repodir flags:
93
94 patchtest --patch /path/to/patch/file --repodir /path/to/repo --testdir /path/to/test/dir
95
96### Guest Mode
97
98Patchtest's guest mode has been refactored to more closely mirror the
99typical Yocto Project image build workflow, but there are still some key
100differences to keep in mind. The primary objective is to provide a level
101of isolation from the host when testing patches pulled automatically
102from the mailing lists. When executed this way, the test process is
103essentially running arbitrary code from the internet, which could be
104catastrophic if malicious changes or even poorly handled edge cases
105aren't guarded against. In order to use this mode, the
106https://git.yoctoproject.org/patchtest/ repository must be cloned and
107the meta-patchtest layer added to bblayers.conf.
108
109The general flow of guest mode is:
110
1111. Run patchtest-setup-sharedir --directory <dirname> to create a
112 directory for mounting
1132. Collect patches via patchtest-get-series (or other manual step) into the
114 <dirname>/mboxes path
1153. Ensure that a user with ID 1200 has appropriate read/write
116 permissions to <dirname> and <dirname>/mboxes, so that the
117 "patchtest" user in the core-image-patchtest image can function
1184. Build the core-image-patchtest image
1195. Run the core-image-patchtest image with the mounted sharedir, like
120 so:
121 `runqemu kvm nographic qemuparams="-snapshot -fsdev
122 local,id=test_mount,path=/workspace/yocto/poky/build/patchtestdir,security_model=mapped
123 -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m
124 2048"`
125
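As a sketch of step 3 (assuming no user with ID 1200 already exists on the
host; the user name and <dirname> placeholder are illustrative):

    sudo useradd -u 1200 patchtest
    sudo chown -R 1200 <dirname>
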
126Patchtest runs as an initscript for the core-image-patchtest image and
127shuts down after completion, so there is no input required from a user
128during operation. Unlike in host mode, the guest is designed to
129automatically generate test result files, in the same directory as the
130targeted patch files but with .testresult as an extension. These contain
131the entire output of the patchtest run for each respective pass,
132including the PASS, FAIL, and SKIP indicators for each test run.
133
134## Contributing
135
136The Yocto Project mailing list (openembedded-core@lists.openembedded.org) is used for questions,
137comments, and patch review. It is subscriber-only, so please register before
138posting.
139
140When sending single patches, please use something like:
141
142 git send-email -M -1 --to=openembedded-core@lists.openembedded.org --subject-prefix=OE-core][PATCH
143
144## Maintenance
145
146
147Maintainers:
148 Trevor Gamblin <tgamblin@baylibre.com>
149
150## Links
151
152[1] https://git.openembedded.org/openembedded-core/
153[2] https://www.yoctoproject.org/community/mailing-lists/
diff --git a/scripts/postinst-intercepts/update_gtk_icon_cache b/scripts/postinst-intercepts/update_gtk_icon_cache
index 99367a2855..a92bd840c6 100644
--- a/scripts/postinst-intercepts/update_gtk_icon_cache
+++ b/scripts/postinst-intercepts/update_gtk_icon_cache
@@ -11,7 +11,11 @@ $STAGING_DIR_NATIVE/${libdir_native}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --u
11 11
12for icondir in $D/usr/share/icons/*/ ; do 12for icondir in $D/usr/share/icons/*/ ; do
13 if [ -d $icondir ] ; then 13 if [ -d $icondir ] ; then
14 gtk-update-icon-cache -fqt $icondir 14 for gtkuic_cmd in gtk-update-icon-cache gtk4-update-icon-cache ; do
15 if [ -n "$(which $gtkuic_cmd)" ]; then
16 $gtkuic_cmd -fqt $icondir
17 fi
18 done
15 fi 19 fi
16done 20done
17 21
diff --git a/scripts/postinst-intercepts/update_mandb b/scripts/postinst-intercepts/update_mandb
new file mode 100644
index 0000000000..f91bafdb11
--- /dev/null
+++ b/scripts/postinst-intercepts/update_mandb
@@ -0,0 +1,18 @@
1#!/bin/sh
2#
3# SPDX-License-Identifier: MIT
4#
5
6set -eu
7
8# Create a temporary man_db.conf with paths to the rootfs, as mandb needs absolute paths
9CONFIG=$(mktemp --tmpdir update-mandb.XXXXX)
10sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf > $CONFIG
11
12mkdir -p $D${localstatedir}/cache/man/
13
14PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${bindir}/mandb --config-file $CONFIG --create
15
16rm -f $CONFIG
17
18chown -R man:man $D${localstatedir}/cache/man/
diff --git a/scripts/postinst-intercepts/update_udev_hwdb b/scripts/postinst-intercepts/update_udev_hwdb
index 8076b8ae6f..8b3f5de791 100644
--- a/scripts/postinst-intercepts/update_udev_hwdb
+++ b/scripts/postinst-intercepts/update_udev_hwdb
@@ -9,14 +9,17 @@ case "${PREFERRED_PROVIDER_udev}" in
9 systemd) 9 systemd)
10 UDEV_EXTRA_ARGS="--usr" 10 UDEV_EXTRA_ARGS="--usr"
11 UDEVLIBDIR="${rootlibexecdir}" 11 UDEVLIBDIR="${rootlibexecdir}"
12 UDEVADM="${base_bindir}/udevadm"
12 ;; 13 ;;
13 14
14 *) 15 *)
15 UDEV_EXTRA_ARGS="" 16 UDEV_EXTRA_ARGS=""
16 UDEVLIBDIR="${sysconfdir}" 17 UDEVLIBDIR="${sysconfdir}"
18 UDEVADM="${bindir}/udevadm"
17 ;; 19 ;;
18esac 20esac
19 21
20rm -f $D${UDEVLIBDIR}/udev/hwdb.bin 22rm -f $D${UDEVLIBDIR}/udev/hwdb.bin
21PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${libexecdir}/${binprefix}udevadm hwdb --update --root $D ${UDEV_EXTRA_ARGS} 23PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${UDEVADM} hwdb --update --root $D ${UDEV_EXTRA_ARGS} ||
24 PSEUDO_UNLOAD=1 qemuwrapper -L $D $D${UDEVADM} hwdb --update --root $D ${UDEV_EXTRA_ARGS}
22chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin 25chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py
index 53324b9f8b..c6e67833ab 100644
--- a/scripts/pybootchartgui/pybootchartgui/draw.py
+++ b/scripts/pybootchartgui/pybootchartgui/draw.py
@@ -80,6 +80,22 @@ MEM_BUFFERS_COLOR = (0.4, 0.4, 0.4, 0.3)
80# Swap color 80# Swap color
81MEM_SWAP_COLOR = DISK_TPUT_COLOR 81MEM_SWAP_COLOR = DISK_TPUT_COLOR
82 82
83# avg10 CPU pressure color
84CPU_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
85# delta total CPU pressure color
86CPU_PRESSURE_TOTAL_COLOR = CPU_COLOR
87# avg10 IO pressure color
88IO_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
89# delta total IO pressure color
90IO_PRESSURE_TOTAL_COLOR = IO_COLOR
91# avg10 memory pressure color
92MEM_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
93# delta total memory pressure color
94MEM_PRESSURE_TOTAL_COLOR = DISK_TPUT_COLOR
95
96
97
98
83# Process border color. 99# Process border color.
84PROC_BORDER_COLOR = (0.71, 0.71, 0.71, 1.0) 100PROC_BORDER_COLOR = (0.71, 0.71, 0.71, 1.0)
85# Waiting process color. 101# Waiting process color.
@@ -267,11 +283,14 @@ def draw_chart(ctx, color, fill, chart_bounds, data, proc_tree, data_range):
267 # avoid divide by zero 283 # avoid divide by zero
268 if max_y == 0: 284 if max_y == 0:
269 max_y = 1.0 285 max_y = 1.0
270 xscale = float (chart_bounds[2]) / (max_x - x_shift) 286 if (max_x - x_shift):
287 xscale = float (chart_bounds[2]) / (max_x - x_shift)
288 else:
289 xscale = float (chart_bounds[2])
271 # If data_range is given, scale the chart so that the value range in 290 # If data_range is given, scale the chart so that the value range in
272 # data_range matches the chart bounds exactly. 291 # data_range matches the chart bounds exactly.
273 # Otherwise, scale so that the actual data matches the chart bounds. 292 # Otherwise, scale so that the actual data matches the chart bounds.
274 if data_range: 293 if data_range and (data_range[1] - data_range[0]):
275 yscale = float(chart_bounds[3]) / (data_range[1] - data_range[0]) 294 yscale = float(chart_bounds[3]) / (data_range[1] - data_range[0])
276 ybase = data_range[0] 295 ybase = data_range[0]
277 else: 296 else:
@@ -337,6 +356,12 @@ def extents(options, xscale, trace):
337 h += 30 + bar_h 356 h += 30 + bar_h
338 if trace.disk_stats: 357 if trace.disk_stats:
339 h += 30 + bar_h 358 h += 30 + bar_h
359 if trace.cpu_pressure:
360 h += 30 + bar_h
361 if trace.io_pressure:
362 h += 30 + bar_h
363 if trace.mem_pressure:
364 h += 30 + bar_h
340 if trace.monitor_disk: 365 if trace.monitor_disk:
341 h += 30 + bar_h 366 h += 30 + bar_h
342 if trace.mem_stats: 367 if trace.mem_stats:
@@ -412,6 +437,108 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
412 437
413 curr_y = curr_y + 30 + bar_h 438 curr_y = curr_y + 30 + bar_h
414 439
440 # render CPU pressure chart
441 if trace.cpu_pressure:
442 max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10)
443 max_sample_total = max (trace.cpu_pressure, key = lambda s: s.deltaTotal)
444 draw_legend_line(ctx, "avg10 CPU Pressure (max %d%%)" % (max_sample_avg.avg10), CPU_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
445 draw_legend_box(ctx, "delta total CPU Pressure (max %d)" % (max_sample_total.deltaTotal), CPU_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
446
447 # render delta total cpu
448 chart_rect = (off_x, curr_y+30, w, bar_h)
449 if clip_visible (clip, chart_rect):
450 draw_box_ticks (ctx, chart_rect, sec_w)
451 draw_annotations (ctx, proc_tree, trace.times, chart_rect)
452 draw_chart (ctx, CPU_PRESSURE_TOTAL_COLOR, True, chart_rect, \
453 [(sample.time, sample.deltaTotal) for sample in trace.cpu_pressure], \
454 proc_tree, None)
455
456 # render avg10 cpu
457 if clip_visible (clip, chart_rect):
458 draw_chart (ctx, CPU_PRESSURE_AVG10_COLOR, False, chart_rect, \
459 [(sample.time, sample.avg10) for sample in trace.cpu_pressure], \
460 proc_tree, None)
461
462 pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
463
464 shift_x, shift_y = -20, 20
465 if (pos_x < off_x + 245):
466 shift_x, shift_y = 5, 40
467
468
469 label = "%d%%" % (max_sample_avg.avg10)
470 draw_text (ctx, label, CPU_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
471
472 curr_y = curr_y + 30 + bar_h
473
474 # render I/O pressure chart
475 if trace.io_pressure:
476 max_sample_avg = max (trace.io_pressure, key = lambda s: s.avg10)
477 max_sample_total = max (trace.io_pressure, key = lambda s: s.deltaTotal)
478 draw_legend_line(ctx, "avg10 I/O Pressure (max %d%%)" % (max_sample_avg.avg10), IO_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
479 draw_legend_box(ctx, "delta total I/O Pressure (max %d)" % (max_sample_total.deltaTotal), IO_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
480
481 # render delta total io
482 chart_rect = (off_x, curr_y+30, w, bar_h)
483 if clip_visible (clip, chart_rect):
484 draw_box_ticks (ctx, chart_rect, sec_w)
485 draw_annotations (ctx, proc_tree, trace.times, chart_rect)
486 draw_chart (ctx, IO_PRESSURE_TOTAL_COLOR, True, chart_rect, \
487 [(sample.time, sample.deltaTotal) for sample in trace.io_pressure], \
488 proc_tree, None)
489
490 # render avg10 io
491 if clip_visible (clip, chart_rect):
492 draw_chart (ctx, IO_PRESSURE_AVG10_COLOR, False, chart_rect, \
493 [(sample.time, sample.avg10) for sample in trace.io_pressure], \
494 proc_tree, None)
495
496 pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
497
498 shift_x, shift_y = -20, 20
499 if (pos_x < off_x + 245):
500 shift_x, shift_y = 5, 40
501
502
503 label = "%d%%" % (max_sample_avg.avg10)
504 draw_text (ctx, label, IO_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
505
506 curr_y = curr_y + 30 + bar_h
507
508 # render MEM pressure chart
509 if trace.mem_pressure:
510 max_sample_avg = max (trace.mem_pressure, key = lambda s: s.avg10)
511 max_sample_total = max (trace.mem_pressure, key = lambda s: s.deltaTotal)
512 draw_legend_line(ctx, "avg10 MEM Pressure (max %d%%)" % (max_sample_avg.avg10), MEM_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
513 draw_legend_box(ctx, "delta total MEM Pressure (max %d)" % (max_sample_total.deltaTotal), MEM_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
514
515 # render delta total mem
516 chart_rect = (off_x, curr_y+30, w, bar_h)
517 if clip_visible (clip, chart_rect):
518 draw_box_ticks (ctx, chart_rect, sec_w)
519 draw_annotations (ctx, proc_tree, trace.times, chart_rect)
520 draw_chart (ctx, MEM_PRESSURE_TOTAL_COLOR, True, chart_rect, \
521 [(sample.time, sample.deltaTotal) for sample in trace.mem_pressure], \
522 proc_tree, None)
523
524 # render avg10 mem
525 if clip_visible (clip, chart_rect):
526 draw_chart (ctx, MEM_PRESSURE_AVG10_COLOR, False, chart_rect, \
527 [(sample.time, sample.avg10) for sample in trace.mem_pressure], \
528 proc_tree, None)
529
530 pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
531
532 shift_x, shift_y = -20, 20
533 if (pos_x < off_x + 245):
534 shift_x, shift_y = 5, 40
535
536
537 label = "%d%%" % (max_sample_avg.avg10)
538 draw_text (ctx, label, MEM_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
539
540 curr_y = curr_y + 30 + bar_h
541
415 # render disk space usage 542 # render disk space usage
416 # 543 #
417 # Draws the amount of disk space used on each volume relative to the 544 # Draws the amount of disk space used on each volume relative to the
@@ -493,8 +620,8 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
493 620
494 return curr_y 621 return curr_y
495 622
496def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w): 623def render_processes_chart(ctx, options, trace, curr_y, width, h, sec_w):
497 chart_rect = [off_x, curr_y+header_h, w, h - curr_y - 1 * off_y - header_h ] 624 chart_rect = [off_x, curr_y+header_h, width, h - curr_y - 1 * off_y - header_h ]
498 625
499 draw_legend_box (ctx, "Configure", \ 626 draw_legend_box (ctx, "Configure", \
500 TASK_COLOR_CONFIGURE, off_x , curr_y + 45, leg_s) 627 TASK_COLOR_CONFIGURE, off_x , curr_y + 45, leg_s)
@@ -519,8 +646,9 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
519 offset = trace.min or min(trace.start.keys()) 646 offset = trace.min or min(trace.start.keys())
520 for start in sorted(trace.start.keys()): 647 for start in sorted(trace.start.keys()):
521 for process in sorted(trace.start[start]): 648 for process in sorted(trace.start[start]):
649 elapsed_time = trace.processes[process][1] - start
522 if not options.app_options.show_all and \ 650 if not options.app_options.show_all and \
523 trace.processes[process][1] - start < options.app_options.mintime: 651 elapsed_time < options.app_options.mintime:
524 continue 652 continue
525 task = process.split(":")[1] 653 task = process.split(":")[1]
526 654
@@ -529,14 +657,23 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
529 #print(s) 657 #print(s)
530 658
531 x = chart_rect[0] + (start - offset) * sec_w 659 x = chart_rect[0] + (start - offset) * sec_w
532 w = ((trace.processes[process][1] - start) * sec_w) 660 w = elapsed_time * sec_w
661
662 def set_alfa(color, alfa):
663 clist = list(color)
664 clist[-1] = alfa
665 return tuple(clist)
533 666
534 #print("proc at %s %s %s %s" % (x, y, w, proc_h)) 667 #print("proc at %s %s %s %s" % (x, y, w, proc_h))
535 col = None 668 col = None
536 if task == "do_compile": 669 if task == "do_compile":
537 col = TASK_COLOR_COMPILE 670 col = TASK_COLOR_COMPILE
671 elif "do_compile" in task:
672 col = set_alfa(TASK_COLOR_COMPILE, 0.25)
538 elif task == "do_configure": 673 elif task == "do_configure":
539 col = TASK_COLOR_CONFIGURE 674 col = TASK_COLOR_CONFIGURE
675 elif "do_configure" in task:
676 col = set_alfa(TASK_COLOR_CONFIGURE, 0.25)
540 elif task == "do_install": 677 elif task == "do_install":
541 col = TASK_COLOR_INSTALL 678 col = TASK_COLOR_INSTALL
542 elif task == "do_populate_sysroot": 679 elif task == "do_populate_sysroot":
@@ -554,7 +691,10 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
554 draw_fill_rect(ctx, col, (x, y, w, proc_h)) 691 draw_fill_rect(ctx, col, (x, y, w, proc_h))
555 draw_rect(ctx, PROC_BORDER_COLOR, (x, y, w, proc_h)) 692 draw_rect(ctx, PROC_BORDER_COLOR, (x, y, w, proc_h))
556 693
557 draw_label_in_box(ctx, PROC_TEXT_COLOR, process, x, y + proc_h - 4, w, proc_h) 694 # Show elapsed time for each task
695 process = "%ds %s" % (elapsed_time, process)
696 draw_label_in_box(ctx, PROC_TEXT_COLOR, process, x, y + proc_h - 4, w, width)
697
558 y = y + proc_h 698 y = y + proc_h
559 699
560 return curr_y 700 return curr_y
@@ -695,7 +835,7 @@ def draw_processes_recursively(ctx, proc, proc_tree, y, proc_h, rect, clip) :
695 cmdString = proc.cmd 835 cmdString = proc.cmd
696 else: 836 else:
697 cmdString = '' 837 cmdString = ''
698 if (OPTIONS.show_pid or OPTIONS.show_all) and ipid is not 0: 838 if (OPTIONS.show_pid or OPTIONS.show_all) and ipid != 0:
699 cmdString = cmdString + " [" + str(ipid // 1000) + "]" 839 cmdString = cmdString + " [" + str(ipid // 1000) + "]"
700 if OPTIONS.show_all: 840 if OPTIONS.show_all:
701 if proc.args: 841 if proc.args:
@@ -793,7 +933,7 @@ class CumlSample:
793 if self.color is None: 933 if self.color is None:
794 i = self.next() % HSV_MAX_MOD 934 i = self.next() % HSV_MAX_MOD
795 h = 0.0 935 h = 0.0
796 if i is not 0: 936 if i != 0:
797 h = (1.0 * i) / HSV_MAX_MOD 937 h = (1.0 * i) / HSV_MAX_MOD
798 s = 0.5 938 s = 0.5
799 v = 1.0 939 v = 1.0
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py
index b42dac6b88..63a53b6b88 100644
--- a/scripts/pybootchartgui/pybootchartgui/parsing.py
+++ b/scripts/pybootchartgui/pybootchartgui/parsing.py
@@ -49,6 +49,9 @@ class Trace:
49 self.parent_map = None 49 self.parent_map = None
50 self.mem_stats = [] 50 self.mem_stats = []
51 self.monitor_disk = None 51 self.monitor_disk = None
52 self.cpu_pressure = []
53 self.io_pressure = []
54 self.mem_pressure = []
52 self.times = [] # Always empty, but expected by draw.py when drawing system charts. 55 self.times = [] # Always empty, but expected by draw.py when drawing system charts.
53 56
54 if len(paths): 57 if len(paths):
@@ -128,7 +131,7 @@ class Trace:
128 def compile(self, writer): 131 def compile(self, writer):
129 132
130 def find_parent_id_for(pid): 133 def find_parent_id_for(pid):
131 if pid is 0: 134 if pid == 0:
132 return 0 135 return 0
133 ppid = self.parent_map.get(pid) 136 ppid = self.parent_map.get(pid)
134 if ppid: 137 if ppid:
@@ -554,6 +557,29 @@ def _parse_monitor_disk_log(file):
554 557
555 return disk_stats 558 return disk_stats
556 559
560def _parse_pressure_logs(file, filename):
561 """
562    Parse file for "some" pressure with 'avg10', 'avg60', 'avg300' and delta total values
563    (in that order) stored directly on one line for CPU, IO and memory, based on filename.
564 """
565 pressure_stats = []
566 if filename == "cpu.log":
567 SamplingClass = CPUPressureSample
568 elif filename == "memory.log":
569 SamplingClass = MemPressureSample
570 else:
571 SamplingClass = IOPressureSample
572 for time, lines in _parse_timed_blocks(file):
573 for line in lines:
574 if not line: continue
575 tokens = line.split()
576 avg10 = float(tokens[0])
577 avg60 = float(tokens[1])
578 avg300 = float(tokens[2])
579 delta = float(tokens[3])
580 pressure_stats.append(SamplingClass(time, avg10, avg60, avg300, delta))
581
582 return pressure_stats
557 583
558# if we boot the kernel with: initcall_debug printk.time=1 we can 584# if we boot the kernel with: initcall_debug printk.time=1 we can
559# get all manner of interesting data from the dmesg output 585# get all manner of interesting data from the dmesg output
@@ -741,6 +767,13 @@ def _do_parse(writer, state, filename, file):
741 state.cmdline = _parse_cmdline_log(writer, file) 767 state.cmdline = _parse_cmdline_log(writer, file)
742 elif name == "monitor_disk.log": 768 elif name == "monitor_disk.log":
743 state.monitor_disk = _parse_monitor_disk_log(file) 769 state.monitor_disk = _parse_monitor_disk_log(file)
770 #pressure logs are in a subdirectory
771 elif name == "cpu.log":
772 state.cpu_pressure = _parse_pressure_logs(file, name)
773 elif name == "io.log":
774 state.io_pressure = _parse_pressure_logs(file, name)
775 elif name == "memory.log":
776 state.mem_pressure = _parse_pressure_logs(file, name)
744 elif not filename.endswith('.log'): 777 elif not filename.endswith('.log'):
745 _parse_bitbake_buildstats(writer, state, filename, file) 778 _parse_bitbake_buildstats(writer, state, filename, file)
746 t2 = time.process_time() 779 t2 = time.process_time()
diff --git a/scripts/pybootchartgui/pybootchartgui/samples.py b/scripts/pybootchartgui/pybootchartgui/samples.py
index 9fc309b3ab..a70d8a5a28 100644
--- a/scripts/pybootchartgui/pybootchartgui/samples.py
+++ b/scripts/pybootchartgui/pybootchartgui/samples.py
@@ -37,6 +37,31 @@ class CPUSample:
37 return str(self.time) + "\t" + str(self.user) + "\t" + \ 37 return str(self.time) + "\t" + str(self.user) + "\t" + \
38 str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap) 38 str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap)
39 39
40class CPUPressureSample:
41 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
42 self.time = time
43 self.avg10 = avg10
44 self.avg60 = avg60
45 self.avg300 = avg300
46 self.deltaTotal = deltaTotal
47
48class IOPressureSample:
49 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
50 self.time = time
51 self.avg10 = avg10
52 self.avg60 = avg60
53 self.avg300 = avg300
54 self.deltaTotal = deltaTotal
55
56class MemPressureSample:
57 def __init__(self, time, avg10, avg60, avg300, deltaTotal):
58 self.time = time
59 self.avg10 = avg10
60 self.avg60 = avg60
61 self.avg300 = avg300
62 self.deltaTotal = deltaTotal
63
64
40class MemSample: 65class MemSample:
41 used_values = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree',) 66 used_values = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree',)
42 67
diff --git a/scripts/pythondeps b/scripts/pythondeps
index be21dd84eb..48277ec28a 100755
--- a/scripts/pythondeps
+++ b/scripts/pythondeps
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Determine dependencies of python scripts or available python modules in a search path. 7# Determine dependencies of python scripts or available python modules in a search path.
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py
index 8c0fdb986a..8a728720ba 100755
--- a/scripts/relocate_sdk.py
+++ b/scripts/relocate_sdk.py
@@ -30,9 +30,16 @@ else:
30old_prefix = re.compile(b("##DEFAULT_INSTALL_DIR##")) 30old_prefix = re.compile(b("##DEFAULT_INSTALL_DIR##"))
31 31
32def get_arch(): 32def get_arch():
33 global endian_prefix
33 f.seek(0) 34 f.seek(0)
34 e_ident =f.read(16) 35 e_ident =f.read(16)
35 ei_mag0,ei_mag1_3,ei_class = struct.unpack("<B3sB11x", e_ident) 36 ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident)
37
38 # ei_data = 1 for little-endian & 0 for big-endian
39 if ei_data == 1:
40 endian_prefix = '<'
41 else:
42 endian_prefix = '>'
36 43
37 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0: 44 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0:
38 return 0 45 return 0
@@ -51,11 +58,11 @@ def parse_elf_header():
51 58
52 if arch == 32: 59 if arch == 32:
53 # 32bit 60 # 32bit
54 hdr_fmt = "<HHILLLIHHHHHH" 61 hdr_fmt = endian_prefix + "HHILLLIHHHHHH"
55 hdr_size = 52 62 hdr_size = 52
56 else: 63 else:
57 # 64bit 64 # 64bit
58 hdr_fmt = "<HHIQQQIHHHHHH" 65 hdr_fmt = endian_prefix + "HHIQQQIHHHHHH"
59 hdr_size = 64 66 hdr_size = 64
60 67
61 e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ 68 e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\
@@ -64,9 +71,9 @@ def parse_elf_header():
64 71
65def change_interpreter(elf_file_name): 72def change_interpreter(elf_file_name):
66 if arch == 32: 73 if arch == 32:
67 ph_fmt = "<IIIIIIII" 74 ph_fmt = endian_prefix + "IIIIIIII"
68 else: 75 else:
69 ph_fmt = "<IIQQQQQQ" 76 ph_fmt = endian_prefix + "IIQQQQQQ"
70 77
71 """ look for PT_INTERP section """ 78 """ look for PT_INTERP section """
72 for i in range(0,e_phnum): 79 for i in range(0,e_phnum):
@@ -97,25 +104,26 @@ def change_interpreter(elf_file_name):
97 if (len(new_dl_path) >= p_filesz): 104 if (len(new_dl_path) >= p_filesz):
98 print("ERROR: could not relocate %s, interp size = %i and %i is needed." \ 105 print("ERROR: could not relocate %s, interp size = %i and %i is needed." \
99 % (elf_file_name, p_memsz, len(new_dl_path) + 1)) 106 % (elf_file_name, p_memsz, len(new_dl_path) + 1))
100 break 107 return False
101 dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path)) 108 dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path))
102 f.seek(p_offset) 109 f.seek(p_offset)
103 f.write(dl_path) 110 f.write(dl_path)
104 break 111 break
112 return True
105 113
106def change_dl_sysdirs(elf_file_name): 114def change_dl_sysdirs(elf_file_name):
107 if arch == 32: 115 if arch == 32:
108 sh_fmt = "<IIIIIIIIII" 116 sh_fmt = endian_prefix + "IIIIIIIIII"
109 else: 117 else:
110 sh_fmt = "<IIQQQQIIQQ" 118 sh_fmt = endian_prefix + "IIQQQQIIQQ"
111 119
112 """ read section string table """ 120 """ read section string table """
113 f.seek(e_shoff + e_shstrndx * e_shentsize) 121 f.seek(e_shoff + e_shstrndx * e_shentsize)
114 sh_hdr = f.read(e_shentsize) 122 sh_hdr = f.read(e_shentsize)
115 if arch == 32: 123 if arch == 32:
116 sh_offset, sh_size = struct.unpack("<16xII16x", sh_hdr) 124 sh_offset, sh_size = struct.unpack(endian_prefix + "16xII16x", sh_hdr)
117 else: 125 else:
118 sh_offset, sh_size = struct.unpack("<24xQQ24x", sh_hdr) 126 sh_offset, sh_size = struct.unpack(endian_prefix + "24xQQ24x", sh_hdr)
119 127
120 f.seek(sh_offset) 128 f.seek(sh_offset)
121 sh_strtab = f.read(sh_size) 129 sh_strtab = f.read(sh_size)
@@ -215,6 +223,7 @@ else:
215 223
216executables_list = sys.argv[3:] 224executables_list = sys.argv[3:]
217 225
226errors = False
218for e in executables_list: 227for e in executables_list:
219 perms = os.stat(e)[stat.ST_MODE] 228 perms = os.stat(e)[stat.ST_MODE]
220 if os.access(e, os.W_OK|os.R_OK): 229 if os.access(e, os.W_OK|os.R_OK):
@@ -240,7 +249,8 @@ for e in executables_list:
240 arch = get_arch() 249 arch = get_arch()
241 if arch: 250 if arch:
242 parse_elf_header() 251 parse_elf_header()
243 change_interpreter(e) 252 if not change_interpreter(e):
253 errors = True
244 change_dl_sysdirs(e) 254 change_dl_sysdirs(e)
245 255
246 """ change permissions back """ 256 """ change permissions back """
@@ -253,3 +263,6 @@ for e in executables_list:
253 print("New file size for %s is different. Looks like a relocation error!", e) 263 print("New file size for %s is different. Looks like a relocation error!", e)
254 sys.exit(-1) 264 sys.exit(-1)
255 265
266if errors:
267 print("Relocation of one or more executables failed.")
268 sys.exit(-1)
diff --git a/scripts/rpm2cpio.sh b/scripts/rpm2cpio.sh
index 7cd771bbe7..8199b43784 100755
--- a/scripts/rpm2cpio.sh
+++ b/scripts/rpm2cpio.sh
@@ -7,7 +7,7 @@ fatal() {
7} 7}
8 8
9pkg="$1" 9pkg="$1"
10[ -n "$pkg" -a -e "$pkg" ] || 10[ -n "$pkg" ] && [ -e "$pkg" ] ||
11 fatal "No package supplied" 11 fatal "No package supplied"
12 12
13_dd() { 13_dd() {
@@ -16,14 +16,23 @@ _dd() {
16} 16}
17 17
18calcsize() { 18calcsize() {
19
20 case "$(_dd $1 bs=4 count=1 | tr -d '\0')" in
21 "$(printf '\216\255\350')"*) ;; # '\x8e\xad\xe8'
22 *) fatal "File doesn't look like rpm: $pkg" ;;
23 esac
24
19 offset=$(($1 + 8)) 25 offset=$(($1 + 8))
20 26
21 local i b b0 b1 b2 b3 b4 b5 b6 b7 27 local i b b0 b1 b2 b3 b4 b5 b6 b7
22 28
23 i=0 29 i=0
24 while [ $i -lt 8 ]; do 30 while [ $i -lt 8 ]; do
25        b=$(_dd $(($offset + $i)) bs=1 count=1; echo X) 31        # add . to not lose \n
26 b=${b%X} 32 # strip \0 as it gets dropped with warning otherwise
33 b="$(_dd $(($offset + $i)) bs=1 count=1 | tr -d '\0' ; echo .)"
34 b=${b%.} # strip . again
35
27 [ -z "$b" ] && 36 [ -z "$b" ] &&
28 b="0" || 37 b="0" ||
29 b="$(exec printf '%u\n' "'$b")" 38 b="$(exec printf '%u\n' "'$b")"
@@ -35,7 +44,7 @@ calcsize() {
35 offset=$(($offset + $rsize)) 44 offset=$(($offset + $rsize))
36} 45}
37 46
38case "$(_dd 0 bs=8 count=1)" in 47case "$(_dd 0 bs=4 count=1 | tr -d '\0')" in
39 "$(printf '\355\253\356\333')"*) ;; # '\xed\xab\xee\xdb' 48 "$(printf '\355\253\356\333')"*) ;; # '\xed\xab\xee\xdb'
40 *) fatal "File doesn't look like rpm: $pkg" ;; 49 *) fatal "File doesn't look like rpm: $pkg" ;;
41esac 50esac
@@ -46,10 +55,11 @@ sigsize=$rsize
46calcsize $(($offset + (8 - ($sigsize % 8)) % 8)) 55calcsize $(($offset + (8 - ($sigsize % 8)) % 8))
47hdrsize=$rsize 56hdrsize=$rsize
48 57
49case "$(_dd $offset bs=3 count=1)" in 58case "$(_dd $offset bs=2 count=1 | tr -d '\0')" in
50 "$(printf '\102\132')"*) _dd $offset | bunzip2 ;; # '\x42\x5a' 59 "$(printf '\102\132')") _dd $offset | bunzip2 ;; # '\x42\x5a'
51 "$(printf '\037\213')"*) _dd $offset | gunzip ;; # '\x1f\x8b' 60 "$(printf '\037\213')") _dd $offset | gunzip ;; # '\x1f\x8b'
52 "$(printf '\375\067')"*) _dd $offset | xzcat ;; # '\xfd\x37' 61 "$(printf '\375\067')") _dd $offset | xzcat ;; # '\xfd\x37'
53 "$(printf '\135\000')"*) _dd $offset | unlzma ;; # '\x5d\x00' 62 "$(printf '\135')") _dd $offset | unlzma ;; # '\x5d\x00'
54 *) fatal "Unrecognized rpm file: $pkg" ;; 63 "$(printf '\050\265')") _dd $offset | unzstd ;; # '\x28\xb5'
64 *) fatal "Unrecognized payload compression format in rpm file: $pkg" ;;
55esac 65esac
diff --git a/scripts/runqemu b/scripts/runqemu
index 532f2e338d..69cd44864e 100755
--- a/scripts/runqemu
+++ b/scripts/runqemu
@@ -18,6 +18,7 @@ import shutil
18import glob 18import glob
19import configparser 19import configparser
20import signal 20import signal
21import time
21 22
22class RunQemuError(Exception): 23class RunQemuError(Exception):
23 """Custom exception to raise on known errors.""" 24 """Custom exception to raise on known errors."""
@@ -65,20 +66,25 @@ of the following environment variables (in any order):
65 MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified) 66 MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified)
66 Simplified QEMU command-line options can be passed with: 67 Simplified QEMU command-line options can be passed with:
67 nographic - disable video console 68 nographic - disable video console
69 nonetwork - disable network connectivity
68 novga - Disable VGA emulation completely 70 novga - Disable VGA emulation completely
69 sdl - choose the SDL UI frontend 71 sdl - choose the SDL UI frontend
70 gtk - choose the Gtk UI frontend 72 gtk - choose the Gtk UI frontend
71 gl - enable virgl-based GL acceleration (also needs gtk or sdl options) 73 gl - enable virgl-based GL acceleration (also needs gtk or sdl options)
72 gl-es - enable virgl-based GL acceleration, using OpenGL ES (also needs gtk or sdl options) 74 gl-es - enable virgl-based GL acceleration, using OpenGL ES (also needs gtk or sdl options)
73 egl-headless - enable headless EGL output; use vnc (via publicvnc option) or spice to see it 75 egl-headless - enable headless EGL output; use vnc (via publicvnc option) or spice to see it
76 (hint: if /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create
77 one suitable for mesa llvmpipe software renderer)
74 serial - enable a serial console on /dev/ttyS0 78 serial - enable a serial console on /dev/ttyS0
75 serialstdio - enable a serial console on the console (regardless of graphics mode) 79 serialstdio - enable a serial console on the console (regardless of graphics mode)
76 slirp - enable user networking, no root privileges is required 80 slirp - enable user networking, no root privilege is required
77 snapshot - don't write changes to back to images 81 snapshot - don't write changes back to images
78 kvm - enable KVM when running x86/x86_64 (VT-capable CPU required) 82 kvm - enable KVM when running x86/x86_64 (VT-capable CPU required)
79 kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required) 83 kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required)
80 publicvnc - enable a VNC server open to all hosts 84 publicvnc - enable a VNC server open to all hosts
81 audio - enable audio 85 audio - enable audio
86 guestagent - enable guest agent communication
87 qmp=<path> - create a QMP socket (defaults to unix:qmp.sock if unspecified)
82 [*/]ovmf* - OVMF firmware file or base name for booting with UEFI 88 [*/]ovmf* - OVMF firmware file or base name for booting with UEFI
83 tcpserial=<port> - specify tcp serial port number 89 tcpserial=<port> - specify tcp serial port number
84 qemuparams=<xyz> - specify custom parameters to QEMU 90 qemuparams=<xyz> - specify custom parameters to QEMU
@@ -113,10 +119,10 @@ def check_tun():
113 if not os.access(dev_tun, os.W_OK): 119 if not os.access(dev_tun, os.W_OK):
114 raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun)) 120 raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun))
115 121
116def get_first_file(cmds): 122def get_first_file(globs):
117 """Return first file found in wildcard cmds""" 123 """Return first file found in wildcard globs"""
118 for cmd in cmds: 124 for g in globs:
119 all_files = glob.glob(cmd) 125 all_files = glob.glob(g)
120 if all_files: 126 if all_files:
121 for f in all_files: 127 for f in all_files:
122 if not os.path.isdir(f): 128 if not os.path.isdir(f):
@@ -139,12 +145,12 @@ class BaseConfig(object):
139 'OE_TMPDIR', 145 'OE_TMPDIR',
140 'OECORE_NATIVE_SYSROOT', 146 'OECORE_NATIVE_SYSROOT',
141 'MULTICONFIG', 147 'MULTICONFIG',
148 'SERIAL_CONSOLES',
142 ) 149 )
143 150
144 self.qemu_opt = '' 151 self.qemu_opt = ''
145 self.qemu_opt_script = '' 152 self.qemu_opt_script = ''
146 self.qemuparams = '' 153 self.qemuparams = ''
147 self.clean_nfs_dir = False
148 self.nfs_server = '' 154 self.nfs_server = ''
149 self.rootfs = '' 155 self.rootfs = ''
150 # File name(s) of a OVMF firmware file or variable store, 156 # File name(s) of a OVMF firmware file or variable store,
@@ -173,6 +179,15 @@ class BaseConfig(object):
173 self.nfs_running = False 179 self.nfs_running = False
174 self.serialconsole = False 180 self.serialconsole = False
175 self.serialstdio = False 181 self.serialstdio = False
182 self.nographic = False
183 self.nonetwork = False
184 self.sdl = False
185 self.gtk = False
186 self.gl = False
187 self.gl_es = False
188 self.egl_headless = False
189 self.publicvnc = False
190 self.novga = False
176 self.cleantap = False 191 self.cleantap = False
177 self.saved_stty = '' 192 self.saved_stty = ''
178 self.audio_enabled = False 193 self.audio_enabled = False
@@ -184,12 +199,14 @@ class BaseConfig(object):
184 self.snapshot = False 199 self.snapshot = False
185 self.wictypes = ('wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx") 200 self.wictypes = ('wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx")
186 self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs', 201 self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs',
187 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz') 202 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz',
203 'squashfs', 'squashfs-xz', 'squashfs-lzo',
204 'squashfs-lz4', 'squashfs-zst')
188 self.vmtypes = ('hddimg', 'iso') 205 self.vmtypes = ('hddimg', 'iso')
189 self.fsinfo = {} 206 self.fsinfo = {}
190 self.network_device = "-device e1000,netdev=net0,mac=@MAC@" 207 self.network_device = "-device e1000,netdev=net0,mac=@MAC@"
191 self.cmdline_ip_slirp = "ip=dhcp" 208 self.cmdline_ip_slirp = "ip=dhcp"
192 self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0" 209 self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8 net.ifnames=0"
193 # Use different mac section for tap and slirp to avoid 210 # Use different mac section for tap and slirp to avoid
194 # conflicts, e.g., when one is running with tap, the other is 211 # conflicts, e.g., when one is running with tap, the other is
195 # running with slirp. 212 # running with slirp.
@@ -199,9 +216,15 @@ class BaseConfig(object):
199 self.mac_tap = "52:54:00:12:34:" 216 self.mac_tap = "52:54:00:12:34:"
200 self.mac_slirp = "52:54:00:12:35:" 217 self.mac_slirp = "52:54:00:12:35:"
201 # pid of the actual qemu process 218 # pid of the actual qemu process
202 self.qemupid = None 219 self.qemu_environ = os.environ.copy()
220 self.qemuprocess = None
203 # avoid cleanup twice 221 # avoid cleanup twice
204 self.cleaned = False 222 self.cleaned = False
223 # Files to cleanup after run
224 self.cleanup_files = []
225 self.qmp = None
226 self.guest_agent = False
227 self.guest_agent_sockpath = '/tmp/qga.sock'
205 228
206 def acquire_taplock(self, error=True): 229 def acquire_taplock(self, error=True):
207 logger.debug("Acquiring lockfile %s..." % self.taplock) 230 logger.debug("Acquiring lockfile %s..." % self.taplock)
@@ -223,9 +246,12 @@ class BaseConfig(object):
223 def release_taplock(self): 246 def release_taplock(self):
224 if self.taplock_descriptor: 247 if self.taplock_descriptor:
225 logger.debug("Releasing lockfile for tap device '%s'" % self.tap) 248 logger.debug("Releasing lockfile for tap device '%s'" % self.tap)
226 fcntl.flock(self.taplock_descriptor, fcntl.LOCK_UN) 249 # We pass the fd to the qemu process and if we unlock here, it would unlock for
250 # that too. Therefore don't unlock, just close
251 # fcntl.flock(self.taplock_descriptor, fcntl.LOCK_UN)
227 self.taplock_descriptor.close() 252 self.taplock_descriptor.close()
228 os.remove(self.taplock) 253 # Removing the file is a potential race, don't do that either
254 # os.remove(self.taplock)
229 self.taplock_descriptor = None 255 self.taplock_descriptor = None
230 256
231 def check_free_port(self, host, port, lockdir): 257 def check_free_port(self, host, port, lockdir):
@@ -263,17 +289,23 @@ class BaseConfig(object):
263 289
264 def release_portlock(self, lockfile=None): 290 def release_portlock(self, lockfile=None):
265 if lockfile != None: 291 if lockfile != None:
266 logger.debug("Releasing lockfile '%s'" % lockfile) 292 logger.debug("Releasing lockfile '%s'" % lockfile)
267 fcntl.flock(self.portlocks[lockfile], fcntl.LOCK_UN) 293 # We pass the fd to the qemu process and if we unlock here, it would unlock for
268 self.portlocks[lockfile].close() 294 # that too. Therefore don't unlock, just close
269 os.remove(lockfile) 295 # fcntl.flock(self.portlocks[lockfile], fcntl.LOCK_UN)
270 del self.portlocks[lockfile] 296 self.portlocks[lockfile].close()
297 # Removing the file is a potential race, don't do that either
298 # os.remove(lockfile)
299 del self.portlocks[lockfile]
271 elif len(self.portlocks): 300 elif len(self.portlocks):
272 for lockfile, descriptor in self.portlocks.items(): 301 for lockfile, descriptor in self.portlocks.items():
273 logger.debug("Releasing lockfile '%s'" % lockfile) 302 logger.debug("Releasing lockfile '%s'" % lockfile)
274 fcntl.flock(descriptor, fcntl.LOCK_UN) 303 # We pass the fd to the qemu process and if we unlock here, it would unlock for
304 # that too. Therefore don't unlock, just close
305 # fcntl.flock(descriptor, fcntl.LOCK_UN)
275 descriptor.close() 306 descriptor.close()
276 os.remove(lockfile) 307 # Removing the file is a potential race, don't do that either
308 # os.remove(lockfile)
277 self.portlocks = {} 309 self.portlocks = {}
278 310
279 def get(self, key): 311 def get(self, key):
@@ -331,21 +363,21 @@ class BaseConfig(object):
331 def check_arg_path(self, p): 363 def check_arg_path(self, p):
332 """ 364 """
333 - Check whether it is <image>.qemuboot.conf or contains <image>.qemuboot.conf 365 - Check whether it is <image>.qemuboot.conf or contains <image>.qemuboot.conf
334 - Check whether is a kernel file 366 - Check whether it is a kernel file
335 - Check whether is a image file 367 - Check whether it is an image file
336 - Check whether it is a nfs dir 368 - Check whether it is an NFS dir
337 - Check whether it is a OVMF flash file 369 - Check whether it is an OVMF flash file
338 """ 370 """
339 if p.endswith('.qemuboot.conf'): 371 if p.endswith('.qemuboot.conf'):
340 self.qemuboot = p 372 self.qemuboot = p
341 self.qbconfload = True 373 self.qbconfload = True
342 elif re.search('\.bin$', p) or re.search('bzImage', p) or \ 374 elif re.search('\\.bin$', p) or re.search('bzImage', p) or \
343 re.search('zImage', p) or re.search('vmlinux', p) or \ 375 re.search('zImage', p) or re.search('vmlinux', p) or \
344 re.search('fitImage', p) or re.search('uImage', p): 376 re.search('fitImage', p) or re.search('uImage', p):
345 self.kernel = p 377 self.kernel = p
346 elif os.path.exists(p) and (not os.path.isdir(p)) and '-image-' in os.path.basename(p): 378 elif os.path.isfile(p) and ('-image-' in os.path.basename(p) or '.rootfs.' in os.path.basename(p)):
347 self.rootfs = p 379 self.rootfs = p
348 # Check filename against self.fstypes can hanlde <file>.cpio.gz, 380 # Check filename against self.fstypes can handle <file>.cpio.gz,
349 # otherwise, its type would be "gz", which is incorrect. 381 # otherwise, its type would be "gz", which is incorrect.
350 fst = "" 382 fst = ""
351 for t in self.fstypes: 383 for t in self.fstypes:
@@ -353,18 +385,24 @@ class BaseConfig(object):
353 fst = t 385 fst = t
354 break 386 break
355 if not fst: 387 if not fst:
356 m = re.search('.*\.(.*)$', self.rootfs) 388 m = re.search('.*\\.(.*)$', self.rootfs)
357 if m: 389 if m:
358 fst = m.group(1) 390 fst = m.group(1)
359 if fst: 391 if fst:
360 self.check_arg_fstype(fst) 392 self.check_arg_fstype(fst)
361 qb = re.sub('\.' + fst + "$", '', self.rootfs) 393 qb = re.sub('\\.' + fst + "$", '.qemuboot.conf', self.rootfs)
362 qb = '%s%s' % (re.sub('\.rootfs$', '', qb), '.qemuboot.conf')
363 if os.path.exists(qb): 394 if os.path.exists(qb):
364 self.qemuboot = qb 395 self.qemuboot = qb
365 self.qbconfload = True 396 self.qbconfload = True
366 else: 397 else:
367 logger.warning("%s doesn't exist" % qb) 398 logger.warning("%s doesn't exist, will try to remove '.rootfs' from filename" % qb)
399            # Try to remove .rootfs (IMAGE_NAME_SUFFIX) as well
400 qb = re.sub('\\.rootfs.qemuboot.conf$', '.qemuboot.conf', qb)
401 if os.path.exists(qb):
402 self.qemuboot = qb
403 self.qbconfload = True
404 else:
405 logger.warning("%s doesn't exist" % qb)
368 else: 406 else:
369 raise RunQemuError("Can't find FSTYPE from: %s" % p) 407 raise RunQemuError("Can't find FSTYPE from: %s" % p)
370 408
@@ -398,6 +436,7 @@ class BaseConfig(object):
398 # are there other scenarios in which we need to support being 436 # are there other scenarios in which we need to support being
399 # invoked by bitbake? 437 # invoked by bitbake?
400 deploy = self.get('DEPLOY_DIR_IMAGE') 438 deploy = self.get('DEPLOY_DIR_IMAGE')
439 image_link_name = self.get('IMAGE_LINK_NAME')
401 bbchild = deploy and self.get('OE_TMPDIR') 440 bbchild = deploy and self.get('OE_TMPDIR')
402 if bbchild: 441 if bbchild:
403 self.set_machine_deploy_dir(arg, deploy) 442 self.set_machine_deploy_dir(arg, deploy)
@@ -422,23 +461,24 @@ class BaseConfig(object):
422 else: 461 else:
423 logger.error("%s not a directory valid DEPLOY_DIR_IMAGE" % deploy_dir_image) 462 logger.error("%s not a directory valid DEPLOY_DIR_IMAGE" % deploy_dir_image)
424 self.set("MACHINE", arg) 463 self.set("MACHINE", arg)
464 if not image_link_name:
465 s = re.search('^IMAGE_LINK_NAME="(.*)"', self.bitbake_e, re.M)
466 if s:
467 image_link_name = s.group(1)
468 self.set("IMAGE_LINK_NAME", image_link_name)
469 logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
425 470
426 def set_dri_path(self): 471 def set_dri_path(self):
427 # As runqemu can be run within bitbake (when using testimage, for example), 472 drivers_path = os.path.join(self.bindir_native, '../lib/dri')
428 # we need to ensure that we run host pkg-config, and that it does not 473 if not os.path.exists(drivers_path) or not os.listdir(drivers_path):
429 # get mis-directed to native build paths set by bitbake. 474 raise RunQemuError("""
430 try: 475qemu has been built without opengl support and accelerated graphics support is not available.
431 del os.environ['PKG_CONFIG_PATH'] 476To enable it, add:
432 del os.environ['PKG_CONFIG_DIR'] 477DISTRO_FEATURES_NATIVE:append = " opengl"
433 del os.environ['PKG_CONFIG_LIBDIR'] 478DISTRO_FEATURES_NATIVESDK:append = " opengl"
434 del os.environ['PKG_CONFIG_SYSROOT_DIR'] 479to your build configuration.
435 except KeyError: 480""")
436 pass 481 self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path
437 try:
438 dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True)
439 except subprocess.CalledProcessError as e:
440 raise RunQemuError("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
441 os.environ['LIBGL_DRIVERS_PATH'] = dripath.decode('utf-8').strip()
442 482
443 def check_args(self): 483 def check_args(self):
444 for debug in ("-d", "--debug"): 484 for debug in ("-d", "--debug"):
@@ -452,51 +492,32 @@ class BaseConfig(object):
452 sys.argv.remove(quiet) 492 sys.argv.remove(quiet)
453 493
454 if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]: 494 if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]:
455 os.environ['SDL_RENDER_DRIVER'] = 'software' 495 self.qemu_environ['SDL_RENDER_DRIVER'] = 'software'
496 self.qemu_environ['SDL_FRAMEBUFFER_ACCELERATION'] = 'false'
456 497
457 unknown_arg = "" 498 unknown_arg = ""
458 for arg in sys.argv[1:]: 499 for arg in sys.argv[1:]:
459 if arg in self.fstypes + self.vmtypes + self.wictypes: 500 if arg in self.fstypes + self.vmtypes + self.wictypes:
460 self.check_arg_fstype(arg) 501 self.check_arg_fstype(arg)
461 elif arg == 'nographic': 502 elif arg == 'nographic':
462 if ('sdl' in sys.argv): 503 self.nographic = True
463 raise RunQemuError('Option nographic makes no sense alongside the sdl option.' % (arg)) 504 elif arg == "nonetwork":
464 if ('gtk' in sys.argv): 505 self.nonetwork = True
465 raise RunQemuError('Option nographic makes no sense alongside the gtk option.' % (arg))
466 self.qemu_opt_script += ' -nographic'
467 self.kernel_cmdline_script += ' console=ttyS0'
468 elif arg == 'sdl': 506 elif arg == 'sdl':
469 if 'gl' in sys.argv[1:]: 507 self.sdl = True
470 self.set_dri_path()
471 self.qemu_opt_script += ' -vga virtio -display sdl,gl=on,show-cursor=on'
472 elif 'gl-es' in sys.argv[1:]:
473 self.set_dri_path()
474 self.qemu_opt_script += ' -vga virtio -display sdl,gl=es,show-cursor=on'
475 else:
476 self.qemu_opt_script += ' -display sdl,show-cursor=on'
477 elif arg == 'gtk': 508 elif arg == 'gtk':
478 if 'gl' in sys.argv[1:]: 509 self.gtk = True
479 self.set_dri_path() 510 elif arg == 'gl':
480 self.qemu_opt_script += ' -vga virtio -display gtk,gl=on,show-cursor=on' 511 self.gl = True
481 elif 'gl-es' in sys.argv[1:]: 512 elif arg == 'gl-es':
482 self.set_dri_path() 513 self.gl_es = True
483 self.qemu_opt_script += ' -vga virtio -display gtk,gl=es,show-cursor=on'
484 else:
485 self.qemu_opt_script += ' -display gtk,show-cursor=on'
486 elif arg == 'gl' or arg == 'gl-es':
487 # These args are handled inside sdl or gtk blocks above
488 if ('gtk' not in sys.argv) and ('sdl' not in sys.argv):
489 raise RunQemuError('Option %s also needs gtk or sdl option.' % (arg))
490 elif arg == 'egl-headless': 514 elif arg == 'egl-headless':
491 self.set_dri_path() 515 self.egl_headless = True
492 self.qemu_opt_script += ' -vga virtio -display egl-headless,show-cursor=on'
493 elif arg == 'novga': 516 elif arg == 'novga':
494 self.qemu_opt_script += ' -vga none' 517 self.novga = True
495 elif arg == 'serial': 518 elif arg == 'serial':
496 self.kernel_cmdline_script += ' console=ttyS0'
497 self.serialconsole = True 519 self.serialconsole = True
498 elif arg == "serialstdio": 520 elif arg == "serialstdio":
499 self.kernel_cmdline_script += ' console=ttyS0'
500 self.serialstdio = True 521 self.serialstdio = True
501 elif arg == 'audio': 522 elif arg == 'audio':
502 logger.info("Enabling audio in qemu") 523 logger.info("Enabling audio in qemu")
@@ -513,7 +534,16 @@ class BaseConfig(object):
513 elif arg == 'snapshot': 534 elif arg == 'snapshot':
514 self.snapshot = True 535 self.snapshot = True
515 elif arg == 'publicvnc': 536 elif arg == 'publicvnc':
537 self.publicvnc = True
516 self.qemu_opt_script += ' -vnc :0' 538 self.qemu_opt_script += ' -vnc :0'
539 elif arg == 'guestagent':
540 self.guest_agent = True
541 elif arg == "qmp":
542 self.qmp = "unix:qmp.sock"
543 elif arg.startswith("qmp="):
544 self.qmp = arg[len('qmp='):]
545 elif arg.startswith('guestagent-sockpath='):
546 self.guest_agent_sockpath = '%s' % arg[len('guestagent-sockpath='):]
517 elif arg.startswith('tcpserial='): 547 elif arg.startswith('tcpserial='):
518 self.tcpserial_portnum = '%s' % arg[len('tcpserial='):] 548 self.tcpserial_portnum = '%s' % arg[len('tcpserial='):]
519 elif arg.startswith('qemuparams='): 549 elif arg.startswith('qemuparams='):
@@ -545,21 +575,28 @@ class BaseConfig(object):
545 self.check_arg_machine(unknown_arg) 575 self.check_arg_machine(unknown_arg)
546 576
547 if not (self.get('DEPLOY_DIR_IMAGE') or self.qbconfload): 577 if not (self.get('DEPLOY_DIR_IMAGE') or self.qbconfload):
548 self.load_bitbake_env() 578 self.load_bitbake_env(target=self.rootfs)
549 s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M) 579 s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M)
550 if s: 580 if s:
551 self.set("DEPLOY_DIR_IMAGE", s.group(1)) 581 self.set("DEPLOY_DIR_IMAGE", s.group(1))
552 582
583 if not self.get('IMAGE_LINK_NAME') and self.rootfs:
584 s = re.search('^IMAGE_LINK_NAME="(.*)"', self.bitbake_e, re.M)
585 if s:
586 image_link_name = s.group(1)
587 self.set("IMAGE_LINK_NAME", image_link_name)
588 logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
589
553 def check_kvm(self): 590 def check_kvm(self):
554 """Check kvm and kvm-host""" 591 """Check kvm and kvm-host"""
555 if not (self.kvm_enabled or self.vhost_enabled): 592 if not (self.kvm_enabled or self.vhost_enabled):
556 self.qemu_opt_script += ' %s %s' % (self.get('QB_MACHINE'), self.get('QB_CPU')) 593 self.qemu_opt_script += ' %s %s %s' % (self.get('QB_MACHINE'), self.get('QB_CPU'), self.get('QB_SMP'))
557 return 594 return
558 595
559 if not self.get('QB_CPU_KVM'): 596 if not self.get('QB_CPU_KVM'):
560 raise RunQemuError("QB_CPU_KVM is NULL, this board doesn't support kvm") 597 raise RunQemuError("QB_CPU_KVM is NULL, this board doesn't support kvm")
561 598
562 self.qemu_opt_script += ' %s %s' % (self.get('QB_MACHINE'), self.get('QB_CPU_KVM')) 599 self.qemu_opt_script += ' %s %s %s' % (self.get('QB_MACHINE'), self.get('QB_CPU_KVM'), self.get('QB_SMP'))
563 yocto_kvm_wiki = "https://wiki.yoctoproject.org/wiki/How_to_enable_KVM_for_Poky_qemu" 600 yocto_kvm_wiki = "https://wiki.yoctoproject.org/wiki/How_to_enable_KVM_for_Poky_qemu"
564 yocto_paravirt_kvm_wiki = "https://wiki.yoctoproject.org/wiki/Running_an_x86_Yocto_Linux_image_under_QEMU_KVM" 601 yocto_paravirt_kvm_wiki = "https://wiki.yoctoproject.org/wiki/Running_an_x86_Yocto_Linux_image_under_QEMU_KVM"
565 dev_kvm = '/dev/kvm' 602 dev_kvm = '/dev/kvm'
@@ -579,11 +616,6 @@ class BaseConfig(object):
579 616
580 if os.access(dev_kvm, os.W_OK|os.R_OK): 617 if os.access(dev_kvm, os.W_OK|os.R_OK):
581 self.qemu_opt_script += ' -enable-kvm' 618 self.qemu_opt_script += ' -enable-kvm'
582 if self.get('MACHINE') == "qemux86":
583 # Workaround for broken APIC window on pre 4.15 host kernels which causes boot hangs
584 # See YOCTO #12301
585 # On 64 bit we use x2apic
586 self.kernel_cmdline_script += " clocksource=kvm-clock hpet=disable noapic nolapic"
587 else: 619 else:
588 logger.error("You have no read or write permission on /dev/kvm.") 620 logger.error("You have no read or write permission on /dev/kvm.")
589 logger.error("Please change the ownership of this file as described at:") 621 logger.error("Please change the ownership of this file as described at:")
@@ -624,10 +656,10 @@ class BaseConfig(object):
624 elif fsflag == 'kernel-in-fs': 656 elif fsflag == 'kernel-in-fs':
625 wic_fs = False 657 wic_fs = False
626 else: 658 else:
627 logger.warn('Unknown flag "%s:%s" in QB_FSINFO', fstype, fsflag) 659 logger.warning('Unknown flag "%s:%s" in QB_FSINFO', fstype, fsflag)
628 continue 660 continue
629 else: 661 else:
630 logger.warn('QB_FSINFO is not supported for image type "%s"', fstype) 662 logger.warning('QB_FSINFO is not supported for image type "%s"', fstype)
631 continue 663 continue
632 664
633 if fstype in self.fsinfo: 665 if fstype in self.fsinfo:
@@ -660,16 +692,16 @@ class BaseConfig(object):
660 692
661 if self.rootfs and not os.path.exists(self.rootfs): 693 if self.rootfs and not os.path.exists(self.rootfs):
662 # Lazy rootfs 694 # Lazy rootfs
663 self.rootfs = "%s/%s-%s.%s" % (self.get('DEPLOY_DIR_IMAGE'), 695 self.rootfs = "%s/%s.%s" % (self.get('DEPLOY_DIR_IMAGE'),
664 self.rootfs, self.get('MACHINE'), 696 self.get('IMAGE_LINK_NAME'),
665 self.fstype) 697 self.fstype)
666 elif not self.rootfs: 698 elif not self.rootfs:
667 cmd_name = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_NAME'), self.fstype) 699 glob_name = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_NAME'), self.fstype)
668 cmd_link = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME'), self.fstype) 700 glob_link = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME'), self.fstype)
669 cmds = (cmd_name, cmd_link) 701 globs = (glob_name, glob_link)
670 self.rootfs = get_first_file(cmds) 702 self.rootfs = get_first_file(globs)
671 if not self.rootfs: 703 if not self.rootfs:
672 raise RunQemuError("Failed to find rootfs: %s or %s" % cmds) 704 raise RunQemuError("Failed to find rootfs: %s or %s" % globs)
673 705
674 if not os.path.exists(self.rootfs): 706 if not os.path.exists(self.rootfs):
675 raise RunQemuError("Can't find rootfs: %s" % self.rootfs) 707 raise RunQemuError("Can't find rootfs: %s" % self.rootfs)
@@ -729,10 +761,10 @@ class BaseConfig(object):
729 kernel_match_name = "%s/%s" % (deploy_dir_image, kernel_name) 761 kernel_match_name = "%s/%s" % (deploy_dir_image, kernel_name)
730 kernel_match_link = "%s/%s" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE')) 762 kernel_match_link = "%s/%s" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE'))
731 kernel_startswith = "%s/%s*" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE')) 763 kernel_startswith = "%s/%s*" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE'))
732 cmds = (kernel_match_name, kernel_match_link, kernel_startswith) 764 globs = (kernel_match_name, kernel_match_link, kernel_startswith)
733 self.kernel = get_first_file(cmds) 765 self.kernel = get_first_file(globs)
734 if not self.kernel: 766 if not self.kernel:
735 raise RunQemuError('KERNEL not found: %s, %s or %s' % cmds) 767 raise RunQemuError('KERNEL not found: %s, %s or %s' % globs)
736 768
737 if not os.path.exists(self.kernel): 769 if not os.path.exists(self.kernel):
738 raise RunQemuError("KERNEL %s not found" % self.kernel) 770 raise RunQemuError("KERNEL %s not found" % self.kernel)
@@ -749,13 +781,13 @@ class BaseConfig(object):
749 dtb = self.get('QB_DTB') 781 dtb = self.get('QB_DTB')
750 if dtb: 782 if dtb:
751 deploy_dir_image = self.get('DEPLOY_DIR_IMAGE') 783 deploy_dir_image = self.get('DEPLOY_DIR_IMAGE')
752 cmd_match = "%s/%s" % (deploy_dir_image, dtb) 784 glob_match = "%s/%s" % (deploy_dir_image, dtb)
753 cmd_startswith = "%s/%s*" % (deploy_dir_image, dtb) 785 glob_startswith = "%s/%s*" % (deploy_dir_image, dtb)
754 cmd_wild = "%s/*.dtb" % deploy_dir_image 786 glob_wild = "%s/*.dtb" % deploy_dir_image
755 cmds = (cmd_match, cmd_startswith, cmd_wild) 787 globs = (glob_match, glob_startswith, glob_wild)
756 self.dtb = get_first_file(cmds) 788 self.dtb = get_first_file(globs)
757 if not os.path.exists(self.dtb): 789 if not os.path.exists(self.dtb):
758 raise RunQemuError('DTB not found: %s, %s or %s' % cmds) 790 raise RunQemuError('DTB not found: %s, %s or %s' % globs)
759 791
760 def check_bios(self): 792 def check_bios(self):
761 """Check and set bios""" 793 """Check and set bios"""
@@ -779,7 +811,7 @@ class BaseConfig(object):
779 raise RunQemuError('BIOS not found: %s' % bios_match_name) 811 raise RunQemuError('BIOS not found: %s' % bios_match_name)
780 812
781 if not os.path.exists(self.bios): 813 if not os.path.exists(self.bios):
782 raise RunQemuError("KERNEL %s not found" % self.bios) 814 raise RunQemuError("BIOS %s not found" % self.bios)
783 815
784 816
785 def check_mem(self): 817 def check_mem(self):
@@ -806,7 +838,7 @@ class BaseConfig(object):
806 self.set('QB_MEM', qb_mem) 838 self.set('QB_MEM', qb_mem)
807 839
808 mach = self.get('MACHINE') 840 mach = self.get('MACHINE')
809 if not mach.startswith('qemumips'): 841 if not mach.startswith(('qemumips', 'qemux86', 'qemuloongarch64')):
810 self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M' 842 self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M'
811 843
812 self.qemu_opt_script += ' %s' % self.get('QB_MEM') 844 self.qemu_opt_script += ' %s' % self.get('QB_MEM')
@@ -818,11 +850,11 @@ class BaseConfig(object):
818 if self.get('QB_TCPSERIAL_OPT'): 850 if self.get('QB_TCPSERIAL_OPT'):
819 self.qemu_opt_script += ' ' + self.get('QB_TCPSERIAL_OPT').replace('@PORT@', port) 851 self.qemu_opt_script += ' ' + self.get('QB_TCPSERIAL_OPT').replace('@PORT@', port)
820 else: 852 else:
821 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s' % port 853 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s,nodelay=on' % port
822 854
823 if len(ports) > 1: 855 if len(ports) > 1:
824 for port in ports[1:]: 856 for port in ports[1:]:
825 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s' % port 857 self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s,nodelay=on' % port
826 858
827 def check_and_set(self): 859 def check_and_set(self):
828 """Check configs sanity and set when needed""" 860 """Check configs sanity and set when needed"""
@@ -865,8 +897,10 @@ class BaseConfig(object):
865 machine = self.get('MACHINE') 897 machine = self.get('MACHINE')
866 if not machine: 898 if not machine:
867 machine = os.path.basename(deploy_dir_image) 899 machine = os.path.basename(deploy_dir_image)
868 self.qemuboot = "%s/%s-%s.qemuboot.conf" % (deploy_dir_image, 900 if not self.get('IMAGE_LINK_NAME'):
869 self.rootfs, machine) 901 raise RunQemuError("IMAGE_LINK_NAME wasn't set to find corresponding .qemuboot.conf file")
902 self.qemuboot = "%s/%s.qemuboot.conf" % (deploy_dir_image,
903 self.get('IMAGE_LINK_NAME'))
870 else: 904 else:
871 cmd = 'ls -t %s/*.qemuboot.conf' % deploy_dir_image 905 cmd = 'ls -t %s/*.qemuboot.conf' % deploy_dir_image
872 logger.debug('Running %s...' % cmd) 906 logger.debug('Running %s...' % cmd)
@@ -987,19 +1021,16 @@ class BaseConfig(object):
987 if self.slirp_enabled: 1021 if self.slirp_enabled:
988 self.nfs_server = '10.0.2.2' 1022 self.nfs_server = '10.0.2.2'
989 else: 1023 else:
990 self.nfs_server = '192.168.7.1' 1024 self.nfs_server = '192.168.7.@GATEWAY@'
991 1025
992 # Figure out a new nfs_instance to allow multiple qemus running. 1026 nfsd_port = 3048 + self.nfs_instance
993 ps = subprocess.check_output(("ps", "auxww")).decode('utf-8') 1027 lockdir = "/tmp/qemu-port-locks"
994 pattern = '/bin/unfsd .* -i .*\.pid -e .*/exports([0-9]+) ' 1028 self.make_lock_dir(lockdir)
995 all_instances = re.findall(pattern, ps, re.M) 1029 while not self.check_free_port('localhost', nfsd_port, lockdir):
996 if all_instances: 1030 self.nfs_instance += 1
997 all_instances.sort(key=int) 1031 nfsd_port += 1
998 self.nfs_instance = int(all_instances.pop()) + 1
999
1000 nfsd_port = 3049 + 2 * self.nfs_instance
1001 mountd_port = 3048 + 2 * self.nfs_instance
1002 1032
1033 mountd_port = nfsd_port
1003 # Export vars for runqemu-export-rootfs 1034 # Export vars for runqemu-export-rootfs
1004 export_dict = { 1035 export_dict = {
1005 'NFS_INSTANCE': self.nfs_instance, 1036 'NFS_INSTANCE': self.nfs_instance,
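The hunk above drops the old heuristic of parsing `ps auxww` for running unfsd instances and instead probes ports starting at 3048, claiming each candidate through the shared lock directory. A minimal standalone sketch of the idea (the helper name try_reserve_port and its flock-based internals are illustrative assumptions, in the spirit of runqemu's check_free_port rather than a copy of it):

    import fcntl
    import os
    import socket

    LOCKDIR = "/tmp/qemu-port-locks"

    def try_reserve_port(host, port, lockdir):
        # Take an exclusive, non-blocking flock on a per-port lock file so
        # concurrent runqemu instances never race for the same port; losers
        # simply move on to the next port number.
        lockfile = open(os.path.join(lockdir, str(port)), 'w')
        try:
            fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError:
            lockfile.close()
            return None
        # Also make sure nothing is already listening on the port.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            if s.connect_ex((host, port)) == 0:
                lockfile.close()
                return None
        return lockfile  # caller keeps this open to hold the reservation

    os.makedirs(LOCKDIR, exist_ok=True)
    nfs_instance, nfsd_port = 0, 3048
    lock = try_reserve_port('localhost', nfsd_port, LOCKDIR)
    while lock is None:
        nfs_instance += 1
        nfsd_port += 1
        lock = try_reserve_port('localhost', nfsd_port, LOCKDIR)
    mountd_port = nfsd_port  # mountd now shares nfsd's port

Note that the new code also collapses the old two-ports-per-instance scheme: mountd reuses the nfsd port instead of taking a second one.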
@@ -1010,7 +1041,11 @@ class BaseConfig(object):
1010 # Use '%s' since they are integers 1041 # Use '%s' since they are integers
1011 os.putenv(k, '%s' % v) 1042 os.putenv(k, '%s' % v)
1012 1043
1013 self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s" % (nfsd_port, mountd_port) 1044 qb_nfsrootfs_extra_opt = self.get("QB_NFSROOTFS_EXTRA_OPT")
1045 if qb_nfsrootfs_extra_opt and not qb_nfsrootfs_extra_opt.startswith(","):
1046 qb_nfsrootfs_extra_opt = "," + qb_nfsrootfs_extra_opt
1047
1048 self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s%s" % (nfsd_port, mountd_port, qb_nfsrootfs_extra_opt)
1014 1049
1015 # Extract .tar.bz2 or .tar.bz if no nfs dir 1050 # Extract .tar.bz2 or .tar.bz if no nfs dir
1016 if not (self.rootfs and os.path.isdir(self.rootfs)): 1051 if not (self.rootfs and os.path.isdir(self.rootfs)):
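The QB_NFSROOTFS_EXTRA_OPT handling above only normalises a missing leading comma; everything else is passed through verbatim into the unfs mount options. For example (the wsize/rsize values are invented, purely for illustration):

    extra = "wsize=4096,rsize=4096"   # hypothetical QB_NFSROOTFS_EXTRA_OPT
    if extra and not extra.startswith(","):
        extra = "," + extra
    unfs_opts = "nfsvers=3,port=%s,tcp,mountport=%s%s" % (3048, 3048, extra)
    # -> 'nfsvers=3,port=3048,tcp,mountport=3048,wsize=4096,rsize=4096'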
@@ -1033,22 +1068,41 @@ class BaseConfig(object):
1033 cmd = ('runqemu-extract-sdk', src, dest) 1068 cmd = ('runqemu-extract-sdk', src, dest)
1034 logger.info('Running %s...' % str(cmd)) 1069 logger.info('Running %s...' % str(cmd))
1035 if subprocess.call(cmd) != 0: 1070 if subprocess.call(cmd) != 0:
1036 raise RunQemuError('Failed to run %s' % cmd) 1071 raise RunQemuError('Failed to run %s' % str(cmd))
1037 self.clean_nfs_dir = True
1038 self.rootfs = dest 1072 self.rootfs = dest
1073 self.cleanup_files.append(self.rootfs)
1074 self.cleanup_files.append('%s.pseudo_state' % self.rootfs)
1039 1075
1040 # Start the userspace NFS server 1076 # Start the userspace NFS server
1041 cmd = ('runqemu-export-rootfs', 'start', self.rootfs) 1077 cmd = ('runqemu-export-rootfs', 'start', self.rootfs)
1042 logger.info('Running %s...' % str(cmd)) 1078 logger.info('Running %s...' % str(cmd))
1043 if subprocess.call(cmd) != 0: 1079 if subprocess.call(cmd) != 0:
1044 raise RunQemuError('Failed to run %s' % cmd) 1080 raise RunQemuError('Failed to run %s' % str(cmd))
1045 1081
1046 self.nfs_running = True 1082 self.nfs_running = True
1047 1083
1084 def setup_cmd(self):
1085 cmd = self.get('QB_SETUP_CMD')
1086 if cmd != '':
1087 logger.info('Running setup command %s' % str(cmd))
1088 if subprocess.call(cmd, shell=True) != 0:
1089 raise RunQemuError('Failed to run %s' % str(cmd))
1090
1048 def setup_net_bridge(self): 1091 def setup_net_bridge(self):
1049 self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % ( 1092 self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % (
1050 self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper'))) 1093 self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper')))
1051 1094
1095 def make_lock_dir(self, lockdir):
1096 if not os.path.exists(lockdir):
1097 # There might be a race issue when multiple runqemu processes are
1098 # running at the same time.
1099 try:
1100 os.mkdir(lockdir)
1101 os.chmod(lockdir, 0o777)
1102 except FileExistsError:
1103 pass
1104 return
1105
1052 def setup_slirp(self): 1106 def setup_slirp(self):
1053 """Setup user networking""" 1107 """Setup user networking"""
1054 1108
@@ -1058,7 +1112,7 @@ class BaseConfig(object):
1058 logger.info("Network configuration:%s", netconf) 1112 logger.info("Network configuration:%s", netconf)
1059 self.kernel_cmdline_script += netconf 1113 self.kernel_cmdline_script += netconf
1060 # Port mapping 1114 # Port mapping
1061 hostfwd = ",hostfwd=tcp::2222-:22,hostfwd=tcp::2323-:23" 1115 hostfwd = ",hostfwd=tcp:127.0.0.1:2222-:22,hostfwd=tcp:127.0.0.1:2323-:23"
1062 qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE')) 1116 qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE'))
1063 qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default 1117 qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default
1064 # Figure out the port 1118 # Figure out the port
@@ -1067,14 +1121,7 @@ class BaseConfig(object):
1067 mac = 2 1121 mac = 2
1068 1122
1069 lockdir = "/tmp/qemu-port-locks" 1123 lockdir = "/tmp/qemu-port-locks"
1070 if not os.path.exists(lockdir): 1124 self.make_lock_dir(lockdir)
1071 # There might be a race issue when multi runqemu processess are
1072 # running at the same time.
1073 try:
1074 os.mkdir(lockdir)
1075 os.chmod(lockdir, 0o777)
1076 except FileExistsError:
1077 pass
1078 1125
1079 # Find a free port to avoid conflicts 1126 # Find a free port to avoid conflicts
1080 for p in ports[:]: 1127 for p in ports[:]:
@@ -1114,20 +1161,17 @@ class BaseConfig(object):
1114 logger.error("ip: %s" % ip) 1161 logger.error("ip: %s" % ip)
1115 raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found") 1162 raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found")
1116 1163
1117 if not os.path.exists(lockdir): 1164 self.make_lock_dir(lockdir)
1118 # There might be a race issue when multi runqemu processess are
1119 # running at the same time.
1120 try:
1121 os.mkdir(lockdir)
1122 os.chmod(lockdir, 0o777)
1123 except FileExistsError:
1124 pass
1125 1165
1126 cmd = (ip, 'link') 1166 cmd = (ip, 'link')
1127 logger.debug('Running %s...' % str(cmd)) 1167 logger.debug('Running %s...' % str(cmd))
1128 ip_link = subprocess.check_output(cmd).decode('utf-8') 1168 ip_link = subprocess.check_output(cmd).decode('utf-8')
1129 # Matches line like: 6: tap0: <foo> 1169 # Matches line like: 6: tap0: <foo>
1130 possibles = re.findall('^[0-9]+: +(tap[0-9]+): <.*', ip_link, re.M) 1170 oe_tap_name = 'tap'
1171 if 'OE_TAP_NAME' in os.environ:
1172 oe_tap_name = os.environ['OE_TAP_NAME']
1173 tap_re = '^[0-9]+: +(' + oe_tap_name + '[0-9]+): <.*'
1174 possibles = re.findall(tap_re, ip_link, re.M)
1131 tap = "" 1175 tap = ""
1132 for p in possibles: 1176 for p in possibles:
1133 lockfile = os.path.join(lockdir, p) 1177 lockfile = os.path.join(lockdir, p)
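With OE_TAP_NAME the tap-matching regex above is no longer hard-coded to 'tap'. A quick sketch of the same pattern against canned 'ip link' output (the interface names are invented; runqemu reads OE_TAP_NAME from the environment and falls back to 'tap'):

    import re

    oe_tap_name = 'mytap'   # e.g. OE_TAP_NAME=mytap exported in the environment
    ip_link = ("5: mytap0: <BROADCAST,MULTICAST> mtu 1500\n"
               "6: mytap1: <BROADCAST,MULTICAST> mtu 1500")
    tap_re = '^[0-9]+: +(' + oe_tap_name + '[0-9]+): <.*'
    print(re.findall(tap_re, ip_link, re.M))   # ['mytap0', 'mytap1']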
@@ -1150,7 +1194,7 @@ class BaseConfig(object):
1150 gid = os.getgid() 1194 gid = os.getgid()
1151 uid = os.getuid() 1195 uid = os.getuid()
1152 logger.info("Setting up tap interface under sudo") 1196 logger.info("Setting up tap interface under sudo")
1153 cmd = ('sudo', self.qemuifup, str(uid), str(gid), self.bindir_native) 1197 cmd = ('sudo', self.qemuifup, str(gid))
1154 try: 1198 try:
1155 tap = subprocess.check_output(cmd).decode('utf-8').strip() 1199 tap = subprocess.check_output(cmd).decode('utf-8').strip()
1156 except subprocess.CalledProcessError as e: 1200 except subprocess.CalledProcessError as e:
@@ -1166,7 +1210,7 @@ class BaseConfig(object):
1166 logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.") 1210 logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.")
1167 sys.exit(1) 1211 sys.exit(1)
1168 self.tap = tap 1212 self.tap = tap
1169 tapnum = int(tap[3:]) 1213 tapnum = int(tap[len(oe_tap_name):])
1170 gateway = tapnum * 2 + 1 1214 gateway = tapnum * 2 + 1
1171 client = gateway + 1 1215 client = gateway + 1
1172 if self.fstype == 'nfs': 1216 if self.fstype == 'nfs':
@@ -1174,6 +1218,7 @@ class BaseConfig(object):
1174 netconf = " " + self.cmdline_ip_tap 1218 netconf = " " + self.cmdline_ip_tap
1175 netconf = netconf.replace('@CLIENT@', str(client)) 1219 netconf = netconf.replace('@CLIENT@', str(client))
1176 netconf = netconf.replace('@GATEWAY@', str(gateway)) 1220 netconf = netconf.replace('@GATEWAY@', str(gateway))
1221 self.nfs_server = self.nfs_server.replace('@GATEWAY@', str(gateway))
1177 logger.info("Network configuration:%s", netconf) 1222 logger.info("Network configuration:%s", netconf)
1178 self.kernel_cmdline_script += netconf 1223 self.kernel_cmdline_script += netconf
1179 mac = "%s%02x" % (self.mac_tap, client) 1224 mac = "%s%02x" % (self.mac_tap, client)
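The tap number drives the whole 192.168.7.x addressing scheme, including the new @GATEWAY@ substitution for the NFS server. A worked example for tap3, consistent with the n/dest arithmetic in runqemu-ifup and runqemu-ifdown further below:

    oe_tap_name = 'tap'
    tap = 'tap3'
    tapnum = int(tap[len(oe_tap_name):])   # 3
    gateway = tapnum * 2 + 1               # 7 -> host side is 192.168.7.7
    client = gateway + 1                   # 8 -> guest side is 192.168.7.8
    nfs_server = '192.168.7.@GATEWAY@'.replace('@GATEWAY@', str(gateway))
    print(nfs_server)                      # '192.168.7.7'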
@@ -1189,7 +1234,8 @@ class BaseConfig(object):
1189 self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qemu_tap_opt)) 1234 self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qemu_tap_opt))
1190 1235
1191 def setup_network(self): 1236 def setup_network(self):
1192 if self.get('QB_NET') == 'none': 1237 if self.nonetwork or self.get('QB_NET') == 'none':
1238 self.set('NETWORK_CMD', '-nic none')
1193 return 1239 return
1194 if sys.stdin.isatty(): 1240 if sys.stdin.isatty():
1195 self.saved_stty = subprocess.check_output(("stty", "-g")).decode('utf-8').strip() 1241 self.saved_stty = subprocess.check_output(("stty", "-g")).decode('utf-8').strip()
@@ -1210,6 +1256,18 @@ class BaseConfig(object):
1210 self.fstype = self.fstype[4:] 1256 self.fstype = self.fstype[4:]
1211 rootfs_format = self.fstype if self.fstype in ('vmdk', 'vhd', 'vhdx', 'qcow2', 'vdi') else 'raw' 1257 rootfs_format = self.fstype if self.fstype in ('vmdk', 'vhd', 'vhdx', 'qcow2', 'vdi') else 'raw'
1212 1258
1259 tmpfsdir = os.environ.get("RUNQEMU_TMPFS_DIR", None)
1260 if self.snapshot and tmpfsdir:
1261 newrootfs = os.path.join(tmpfsdir, os.path.basename(self.rootfs)) + "." + str(os.getpid())
1262 logger.info("Copying rootfs to %s" % newrootfs)
1263 copy_start = time.time()
1264 shutil.copyfile(self.rootfs, newrootfs)
1265 logger.info("Copy done in %s seconds" % (time.time() - copy_start))
1266 self.rootfs = newrootfs
1267 # Don't need a second copy now!
1268 self.snapshot = False
1269 self.cleanup_files.append(newrootfs)
1270
1213 qb_rootfs_opt = self.get('QB_ROOTFS_OPT') 1271 qb_rootfs_opt = self.get('QB_ROOTFS_OPT')
1214 if qb_rootfs_opt: 1272 if qb_rootfs_opt:
1215 self.rootfs_options = qb_rootfs_opt.replace('@ROOTFS@', self.rootfs) 1273 self.rootfs_options = qb_rootfs_opt.replace('@ROOTFS@', self.rootfs)
@@ -1254,7 +1312,13 @@ class BaseConfig(object):
1254 self.rootfs_options = vm_drive 1312 self.rootfs_options = vm_drive
1255 if not self.fstype in self.vmtypes: 1313 if not self.fstype in self.vmtypes:
1256 self.rootfs_options += ' -no-reboot' 1314 self.rootfs_options += ' -no-reboot'
1257 self.kernel_cmdline = 'root=%s rw' % (self.get('QB_KERNEL_ROOT')) 1315
1316 # By default, ' rw' is appended to QB_KERNEL_ROOT unless either ro or rw is explicitly passed.
1317 qb_kernel_root = self.get('QB_KERNEL_ROOT')
1318 qb_kernel_root_l = qb_kernel_root.split()
1319 if not ('ro' in qb_kernel_root_l or 'rw' in qb_kernel_root_l):
1320 qb_kernel_root += ' rw'
1321 self.kernel_cmdline = 'root=%s' % qb_kernel_root
1258 1322
1259 if self.fstype == 'nfs': 1323 if self.fstype == 'nfs':
1260 self.rootfs_options = '' 1324 self.rootfs_options = ''
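The effect of the QB_KERNEL_ROOT change above, shown in isolation: ' rw' is only appended when the variable carries neither flag, so an explicit 'ro' now survives onto the kernel command line (the device names here are illustrative):

    for qb_kernel_root in ('/dev/vda', '/dev/vda ro', '/dev/vda rw'):
        words = qb_kernel_root.split()
        if not ('ro' in words or 'rw' in words):
            qb_kernel_root += ' rw'
        print('root=%s' % qb_kernel_root)
    # -> root=/dev/vda rw
    # -> root=/dev/vda ro
    # -> root=/dev/vda rw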
@@ -1270,7 +1334,7 @@ class BaseConfig(object):
1270 """attempt to determine the appropriate qemu-system binary""" 1334 """attempt to determine the appropriate qemu-system binary"""
1271 mach = self.get('MACHINE') 1335 mach = self.get('MACHINE')
1272 if not mach: 1336 if not mach:
1273 search = '.*(qemux86-64|qemux86|qemuarm64|qemuarm|qemumips64|qemumips64el|qemumipsel|qemumips|qemuppc).*' 1337 search = '.*(qemux86-64|qemux86|qemuarm64|qemuarm|qemuloongarch64|qemumips64|qemumips64el|qemumipsel|qemumips|qemuppc).*'
1274 if self.rootfs: 1338 if self.rootfs:
1275 match = re.match(search, self.rootfs) 1339 match = re.match(search, self.rootfs)
1276 if match: 1340 if match:
@@ -1293,6 +1357,8 @@ class BaseConfig(object):
1293 qbsys = 'x86_64' 1357 qbsys = 'x86_64'
1294 elif mach == 'qemuppc': 1358 elif mach == 'qemuppc':
1295 qbsys = 'ppc' 1359 qbsys = 'ppc'
1360 elif mach == 'qemuloongarch64':
1361 qbsys = 'loongarch64'
1296 elif mach == 'qemumips': 1362 elif mach == 'qemumips':
1297 qbsys = 'mips' 1363 qbsys = 'mips'
1298 elif mach == 'qemumips64': 1364 elif mach == 'qemumips64':
@@ -1321,7 +1387,127 @@ class BaseConfig(object):
1321 raise RunQemuError("Failed to boot, QB_SYSTEM_NAME is NULL!") 1387 raise RunQemuError("Failed to boot, QB_SYSTEM_NAME is NULL!")
1322 self.qemu_system = qemu_system 1388 self.qemu_system = qemu_system
1323 1389
1324 def setup_final(self): 1390 def check_render_nodes(self):
1391 render_hint = """If /dev/dri/renderD* is absent due to the lack of a suitable GPU, 'modprobe vgem' will create one suitable for the mesa llvmpipe software renderer."""
1392 try:
1393 content = os.listdir("/dev/dri")
1394 nodes = [i for i in content if i.startswith('renderD')]
1395 if len(nodes) == 0:
1396 raise RunQemuError("No render nodes found in /dev/dri/: %s. %s" %(content, render_hint))
1397 for n in nodes:
1398 try:
1399 with open(os.path.join("/dev/dri", n), "w") as f:
1400 f.close()
1401 break
1402 except IOError:
1403 pass
1404 else:
1405 raise RunQemuError("None of the render nodes in /dev/dri/ are accessible: %s; you may need to add yourself to the 'render' group or otherwise ensure you have read-write permissions on one of them." %(nodes))
1406 except FileNotFoundError:
1407 raise RunQemuError("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
1408
1409 def setup_guest_agent(self):
1410 if self.guest_agent == True:
1411 self.qemu_opt += ' -chardev socket,path=' + self.guest_agent_sockpath + ',server,nowait,id=qga0 '
1412 self.qemu_opt += ' -device virtio-serial '
1413 self.qemu_opt += ' -device virtserialport,chardev=qga0,name=org.qemu.guest_agent.0 '
1414
1415 def setup_qmp(self):
1416 if self.qmp:
1417 self.qemu_opt += " -qmp %s,server,nowait" % self.qmp
1418
1419 def setup_vga(self):
1420 if self.nographic == True:
1421 if self.sdl == True:
1422 raise RunQemuError('Option nographic makes no sense alongside the sdl option.')
1423 if self.gtk == True:
1424 raise RunQemuError('Option nographic makes no sense alongside the gtk option.')
1425 self.qemu_opt += ' -nographic'
1426
1427 if self.novga == True:
1428 self.qemu_opt += ' -vga none'
1429 return
1430
1431 if (self.gl_es == True or self.gl == True) and (self.sdl == False and self.gtk == False):
1432 raise RunQemuError('Option gl/gl-es needs gtk or sdl option.')
1433
1434 # If we have no display option, we autodetect based upon what qemu supports. We
1435 # need our font setup and show-cursor below so we need to see what qemu --help says
1436 # is supported so we can pass our correct config in.
1437 if not self.nographic and not self.sdl and not self.gtk and not self.publicvnc and not self.egl_headless == True:
1438 output = subprocess.check_output([self.qemu_bin, "--help"], universal_newlines=True, env=self.qemu_environ)
1439 if "-display gtk" in output:
1440 self.gtk = True
1441 elif "-display sdl" in output:
1442 self.sdl = True
1443 else:
1444 self.qemu_opt += ' -display none'
1445
1446 if self.sdl == True or self.gtk == True or self.egl_headless == True:
1447
1448 if self.qemu_system.endswith(('i386', 'x86_64')):
1449 if self.gl or self.gl_es or self.egl_headless:
1450 self.qemu_opt += ' -device virtio-vga-gl '
1451 else:
1452 self.qemu_opt += ' -device virtio-vga '
1453
1454 self.qemu_opt += ' -display '
1455 if self.egl_headless == True:
1456 self.check_render_nodes()
1457 self.set_dri_path()
1458 self.qemu_opt += 'egl-headless,'
1459 else:
1460 if self.sdl == True:
1461 self.qemu_opt += 'sdl,'
1462 elif self.gtk == True:
1463 self.qemu_environ['FONTCONFIG_PATH'] = '/etc/fonts'
1464 self.qemu_opt += 'gtk,'
1465
1466 if self.gl == True:
1467 self.set_dri_path()
1468 self.qemu_opt += 'gl=on,'
1469 elif self.gl_es == True:
1470 self.set_dri_path()
1471 self.qemu_opt += 'gl=es,'
1472 self.qemu_opt += 'show-cursor=on'
1473
1474 self.qemu_opt += ' %s' %self.get('QB_GRAPHICS')
1475
1476 def setup_serial(self):
1477 # Setup correct kernel command line for serial
1478 if self.get('SERIAL_CONSOLES') and (self.serialstdio == True or self.serialconsole == True or self.nographic == True or self.tcpserial_portnum):
1479 for entry in self.get('SERIAL_CONSOLES').split(' '):
1480 self.kernel_cmdline_script += ' console=%s' %entry.split(';')[1]
1481
1482 # We always want ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES).
1483 # If no serial or serialtcp options were specified, only ttyS0 is created
1484 # and sysvinit shows an error trying to enable ttyS1:
1485 # INIT: Id "S1" respawning too fast: disabled for 5 minutes
1486 serial_num = len(re.findall("-serial", self.qemu_opt))
1487
1488 # Assume if the user passed serial options, they know what they want
1489 # and pad to two devices
1490 if serial_num == 1:
1491 self.qemu_opt += " -serial null"
1492 elif serial_num >= 2:
1493 return
1494
1495 if self.serialstdio == True or self.nographic == True:
1496 self.qemu_opt += " -serial mon:stdio"
1497 else:
1498 self.qemu_opt += " -serial mon:vc"
1499 if self.serialconsole:
1500 if sys.stdin.isatty():
1501 subprocess.check_call(("stty", "intr", "^]"))
1502 logger.info("Interrupt character is '^]'")
1503
1504 self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT")
1505
1506 serial_num = len(re.findall("-serial", self.qemu_opt))
1507 if serial_num < 2:
1508 self.qemu_opt += " -serial null"
1509
1510 def find_qemu(self):
1325 qemu_bin = os.path.join(self.bindir_native, self.qemu_system) 1511 qemu_bin = os.path.join(self.bindir_native, self.qemu_system)
1326 1512
1327 # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't 1513 # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't
@@ -1340,11 +1526,18 @@ class BaseConfig(object):
1340 1526
1341 if not os.access(qemu_bin, os.X_OK): 1527 if not os.access(qemu_bin, os.X_OK):
1342 raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin) 1528 raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin)
1529 self.qemu_bin = qemu_bin
1343 1530
1344 self.qemu_opt = "%s %s %s %s %s" % (qemu_bin, self.get('NETWORK_CMD'), self.get('QB_RNG'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND')) 1531 def setup_final(self):
1532
1533 self.find_qemu()
1534
1535 self.qemu_opt = "%s %s %s %s %s" % (self.qemu_bin, self.get('NETWORK_CMD'), self.get('QB_RNG'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND').replace('@DEPLOY_DIR_IMAGE@', self.get('DEPLOY_DIR_IMAGE')))
1345 1536
1346 for ovmf in self.ovmf_bios: 1537 for ovmf in self.ovmf_bios:
1347 format = ovmf.rsplit('.', 1)[-1] 1538 format = ovmf.rsplit('.', 1)[-1]
1539 if format == "bin":
1540 format = "raw"
1348 self.qemu_opt += ' -drive if=pflash,format=%s,file=%s' % (format, ovmf) 1541 self.qemu_opt += ' -drive if=pflash,format=%s,file=%s' % (format, ovmf)
1349 1542
1350 self.qemu_opt += ' ' + self.qemu_opt_script 1543 self.qemu_opt += ' ' + self.qemu_opt_script
@@ -1363,61 +1556,44 @@ class BaseConfig(object):
1363 if self.snapshot: 1556 if self.snapshot:
1364 self.qemu_opt += " -snapshot" 1557 self.qemu_opt += " -snapshot"
1365 1558
1366 if self.serialconsole: 1559 self.setup_guest_agent()
1367 if sys.stdin.isatty(): 1560 self.setup_qmp()
1368 subprocess.check_call(("stty", "intr", "^]")) 1561 self.setup_serial()
1369 logger.info("Interrupt character is '^]'") 1562 self.setup_vga()
1370
1371 first_serial = ""
1372 if not re.search("-nographic", self.qemu_opt):
1373 first_serial = "-serial mon:vc"
1374 # We always want a ttyS1. Since qemu by default adds a serial
1375 # port when nodefaults is not specified, it seems that all that
1376 # would be needed is to make sure a "-serial" is there. However,
1377 # it appears that when "-serial" is specified, it ignores the
1378 # default serial port that is normally added. So here we make
1379 # sure to add two -serial if there are none. And only one if
1380 # there is one -serial already.
1381 serial_num = len(re.findall("-serial", self.qemu_opt))
1382 if serial_num == 0:
1383 self.qemu_opt += " %s %s" % (first_serial, self.get("QB_SERIAL_OPT"))
1384 elif serial_num == 1:
1385 self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT")
1386
1387 # We always wants ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES),
1388 # if not serial or serialtcp options was specified only ttyS0 is created
1389 # and sysvinit shows an error trying to enable ttyS1:
1390 # INIT: Id "S1" respawning too fast: disabled for 5 minutes
1391 serial_num = len(re.findall("-serial", self.qemu_opt))
1392 if serial_num == 0:
1393 if re.search("-nographic", self.qemu_opt) or self.serialstdio:
1394 self.qemu_opt += " -serial mon:stdio -serial null"
1395 else:
1396 self.qemu_opt += " -serial mon:vc -serial null"
1397 1563
1398 def start_qemu(self): 1564 def start_qemu(self):
1399 import shlex 1565 import shlex
1400 if self.kernel: 1566 if self.kernel:
1401 kernel_opts = "-kernel %s -append '%s %s %s %s'" % (self.kernel, self.kernel_cmdline, 1567 kernel_opts = "-kernel %s" % (self.kernel)
1568 if self.get('QB_KERNEL_CMDLINE') == "none":
1569 if self.bootparams:
1570 kernel_opts += " -append '%s'" % (self.bootparams)
1571 else:
1572 kernel_opts += " -append '%s %s %s %s'" % (self.kernel_cmdline,
1402 self.kernel_cmdline_script, self.get('QB_KERNEL_CMDLINE_APPEND'), 1573 self.kernel_cmdline_script, self.get('QB_KERNEL_CMDLINE_APPEND'),
1403 self.bootparams) 1574 self.bootparams)
1404 if self.bios:
1405 kernel_opts += " -bios %s" % self.bios
1406 if self.dtb: 1575 if self.dtb:
1407 kernel_opts += " -dtb %s" % self.dtb 1576 kernel_opts += " -dtb %s" % self.dtb
1408 else: 1577 else:
1409 kernel_opts = "" 1578 kernel_opts = ""
1579
1580 if self.bios:
1581 self.qemu_opt += " -bios %s" % self.bios
1582
1410 cmd = "%s %s" % (self.qemu_opt, kernel_opts) 1583 cmd = "%s %s" % (self.qemu_opt, kernel_opts)
1411 cmds = shlex.split(cmd) 1584 cmds = shlex.split(cmd)
1412 logger.info('Running %s\n' % cmd) 1585 logger.info('Running %s\n' % cmd)
1586 with open('/proc/uptime', 'r') as f:
1587 uptime_seconds = f.readline().split()[0]
1588 logger.info('Host uptime: %s\n' % uptime_seconds)
1413 pass_fds = [] 1589 pass_fds = []
1414 if self.taplock_descriptor: 1590 if self.taplock_descriptor:
1415 pass_fds = [self.taplock_descriptor.fileno()] 1591 pass_fds = [self.taplock_descriptor.fileno()]
1416 if len(self.portlocks): 1592 if len(self.portlocks):
1417 for descriptor in self.portlocks.values(): 1593 for descriptor in self.portlocks.values():
1418 pass_fds.append(descriptor.fileno()) 1594 pass_fds.append(descriptor.fileno())
1419 process = subprocess.Popen(cmds, stderr=subprocess.PIPE, pass_fds=pass_fds) 1595 process = subprocess.Popen(cmds, stderr=subprocess.PIPE, pass_fds=pass_fds, env=self.qemu_environ)
1420 self.qemupid = process.pid 1596 self.qemuprocess = process
1421 retcode = process.wait() 1597 retcode = process.wait()
1422 if retcode: 1598 if retcode:
1423 if retcode == -signal.SIGTERM: 1599 if retcode == -signal.SIGTERM:
@@ -1425,6 +1601,13 @@ class BaseConfig(object):
1425 else: 1601 else:
1426 logger.error("Failed to run qemu: %s", process.stderr.read().decode()) 1602 logger.error("Failed to run qemu: %s", process.stderr.read().decode())
1427 1603
1604 def cleanup_cmd(self):
1605 cmd = self.get('QB_CLEANUP_CMD')
1606 if cmd != '':
1607 logger.info('Running cleanup command %s' % str(cmd))
1608 if subprocess.call(cmd, shell=True) != 0:
1609 raise RunQemuError('Failed to run %s' % str(cmd))
1610
1428 def cleanup(self): 1611 def cleanup(self):
1429 if self.cleaned: 1612 if self.cleaned:
1430 return 1613 return
@@ -1433,30 +1616,48 @@ class BaseConfig(object):
1433 signal.signal(signal.SIGTERM, signal.SIG_IGN) 1616 signal.signal(signal.SIGTERM, signal.SIG_IGN)
1434 1617
1435 logger.info("Cleaning up") 1618 logger.info("Cleaning up")
1619
1620 if self.qemuprocess:
1621 try:
1622 # give it some time to shut down, ignore return values and output
1623 self.qemuprocess.send_signal(signal.SIGTERM)
1624 self.qemuprocess.communicate(timeout=5)
1625 except subprocess.TimeoutExpired:
1626 self.qemuprocess.kill()
1627
1628 with open('/proc/uptime', 'r') as f:
1629 uptime_seconds = f.readline().split()[0]
1630 logger.info('Host uptime: %s\n' % uptime_seconds)
1436 if self.cleantap: 1631 if self.cleantap:
1437 cmd = ('sudo', self.qemuifdown, self.tap, self.bindir_native) 1632 cmd = ('sudo', self.qemuifdown, self.tap)
1438 logger.debug('Running %s' % str(cmd)) 1633 logger.debug('Running %s' % str(cmd))
1439 subprocess.check_call(cmd) 1634 subprocess.check_call(cmd)
1440 self.release_taplock() 1635 self.release_taplock()
1441 self.release_portlock()
1442 1636
1443 if self.nfs_running: 1637 if self.nfs_running:
1444 logger.info("Shutting down the userspace NFS server...") 1638 logger.info("Shutting down the userspace NFS server...")
1445 cmd = ("runqemu-export-rootfs", "stop", self.rootfs) 1639 cmd = ("runqemu-export-rootfs", "stop", self.rootfs)
1446 logger.debug('Running %s' % str(cmd)) 1640 logger.debug('Running %s' % str(cmd))
1447 subprocess.check_call(cmd) 1641 subprocess.check_call(cmd)
1642 self.release_portlock()
1448 1643
1449 if self.saved_stty: 1644 if self.saved_stty:
1450 subprocess.check_call(("stty", self.saved_stty)) 1645 subprocess.check_call(("stty", self.saved_stty))
1451 1646
1452 if self.clean_nfs_dir: 1647 if self.cleanup_files:
1453 logger.info('Removing %s' % self.rootfs) 1648 for ent in self.cleanup_files:
1454 shutil.rmtree(self.rootfs) 1649 logger.info('Removing %s' % ent)
1455 shutil.rmtree('%s.pseudo_state' % self.rootfs) 1650 if os.path.isfile(ent):
1651 os.remove(ent)
1652 else:
1653 shutil.rmtree(ent)
1654
1655 # Deliberately ignore the return code of 'tput smam'.
1656 subprocess.call(["tput", "smam"])
1456 1657
1457 self.cleaned = True 1658 self.cleaned = True
1458 1659
1459 def run_bitbake_env(self, mach=None): 1660 def run_bitbake_env(self, mach=None, target=''):
1460 bitbake = shutil.which('bitbake') 1661 bitbake = shutil.which('bitbake')
1461 if not bitbake: 1662 if not bitbake:
1462 return 1663 return
@@ -1469,22 +1670,33 @@ class BaseConfig(object):
1469 multiconfig = "mc:%s" % multiconfig 1670 multiconfig = "mc:%s" % multiconfig
1470 1671
1471 if mach: 1672 if mach:
1472 cmd = 'MACHINE=%s bitbake -e %s' % (mach, multiconfig) 1673 cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
1473 else: 1674 else:
1474 cmd = 'bitbake -e %s' % multiconfig 1675 cmd = 'bitbake -e %s %s' % (multiconfig, target)
1475 1676
1476 logger.info('Running %s...' % cmd) 1677 logger.info('Running %s...' % cmd)
1477 return subprocess.check_output(cmd, shell=True).decode('utf-8') 1678 try:
1679 return subprocess.check_output(cmd, shell=True).decode('utf-8')
1680 except subprocess.CalledProcessError as err:
1681 logger.warning("Couldn't run '%s' to gather environment information; maybe the target wasn't an image name. Retrying with virtual/kernel as the target:\n%s" % (cmd, err.output.decode('utf-8')))
1682 # need something with IMAGE_NAME_SUFFIX/IMAGE_LINK_NAME defined (kernel also inherits image-artifact-names.bbclass)
1683 target = 'virtual/kernel'
1684 if mach:
1685 cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
1686 else:
1687 cmd = 'bitbake -e %s %s' % (multiconfig, target)
1688 try:
1689 return subprocess.check_output(cmd, shell=True).decode('utf-8')
1690 except subprocess.CalledProcessError as err:
1691 logger.warning("Couldn't run '%s' to gather environment information; giving up on 'bitbake -e':\n%s" % (cmd, err.output.decode('utf-8')))
1692 return ''
1693
1478 1694
1479 def load_bitbake_env(self, mach=None): 1695 def load_bitbake_env(self, mach=None, target=None):
1480 if self.bitbake_e: 1696 if self.bitbake_e:
1481 return 1697 return
1482 1698
1483 try: 1699 self.bitbake_e = self.run_bitbake_env(mach=mach, target=target)
1484 self.bitbake_e = self.run_bitbake_env(mach=mach)
1485 except subprocess.CalledProcessError as err:
1486 self.bitbake_e = ''
1487 logger.warning("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8'))
1488 1700
1489 def validate_combos(self): 1701 def validate_combos(self):
1490 if (self.fstype in self.vmtypes) and self.kernel: 1702 if (self.fstype in self.vmtypes) and self.kernel:
@@ -1514,7 +1726,7 @@ class BaseConfig(object):
1514 return result 1726 return result
1515 raise RunQemuError("Native sysroot directory %s doesn't exist" % result) 1727 raise RunQemuError("Native sysroot directory %s doesn't exist" % result)
1516 else: 1728 else:
1517 raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % cmd) 1729 raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % str(cmd))
1518 1730
1519 1731
1520def main(): 1732def main():
@@ -1530,11 +1742,8 @@ def main():
1530 subprocess.check_call([renice, str(os.getpid())]) 1742 subprocess.check_call([renice, str(os.getpid())])
1531 1743
1532 def sigterm_handler(signum, frame): 1744 def sigterm_handler(signum, frame):
1533 logger.info("SIGTERM received") 1745 logger.info("Received signal: %s" % (signum))
1534 os.kill(config.qemupid, signal.SIGTERM)
1535 config.cleanup() 1746 config.cleanup()
1536 # Deliberately ignore the return code of 'tput smam'.
1537 subprocess.call(["tput", "smam"])
1538 signal.signal(signal.SIGTERM, sigterm_handler) 1747 signal.signal(signal.SIGTERM, sigterm_handler)
1539 1748
1540 config.check_args() 1749 config.check_args()
@@ -1546,6 +1755,7 @@ def main():
1546 config.setup_network() 1755 config.setup_network()
1547 config.setup_rootfs() 1756 config.setup_rootfs()
1548 config.setup_final() 1757 config.setup_final()
1758 config.setup_cmd()
1549 config.start_qemu() 1759 config.start_qemu()
1550 except RunQemuError as err: 1760 except RunQemuError as err:
1551 logger.error(err) 1761 logger.error(err)
@@ -1555,9 +1765,8 @@ def main():
1555 traceback.print_exc() 1765 traceback.print_exc()
1556 return 1 1766 return 1
1557 finally: 1767 finally:
1768 config.cleanup_cmd()
1558 config.cleanup() 1769 config.cleanup()
1559 # Deliberately ignore the return code of 'tput smam'.
1560 subprocess.call(["tput", "smam"])
1561 1770
1562if __name__ == "__main__": 1771if __name__ == "__main__":
1563 sys.exit(main()) 1772 sys.exit(main())
diff --git a/scripts/runqemu-addptable2image b/scripts/runqemu-addptable2image
index ca29427258..87a8da3a63 100755
--- a/scripts/runqemu-addptable2image
+++ b/scripts/runqemu-addptable2image
@@ -1,6 +1,6 @@
1#!/bin/sh 1#!/bin/sh
2 2
3# Add a partion table to an ext2 image file 3# Add a partition table to an ext2 image file
4# 4#
5# Copyright (C) 2006-2007 OpenedHand Ltd. 5# Copyright (C) 2006-2007 OpenedHand Ltd.
6# 6#
diff --git a/scripts/runqemu-export-rootfs b/scripts/runqemu-export-rootfs
index 384c091713..6a8acd0d5a 100755
--- a/scripts/runqemu-export-rootfs
+++ b/scripts/runqemu-export-rootfs
@@ -34,16 +34,12 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then
34 echo "Did you forget to source your build environment setup script?" 34 echo "Did you forget to source your build environment setup script?"
35 exit 1 35 exit 1
36fi 36fi
37. $SYSROOT_SETUP_SCRIPT meta-ide-support 37. $SYSROOT_SETUP_SCRIPT qemu-helper-native
38 38
39if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then 39if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then
40 echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/" 40 echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/"
41 41
42 if [ "x$OECORE_DISTRO_VERSION" = "x" ]; then 42 echo "This shouldn't happen - something is missing from your toolchain installation"
43 echo "Have you run 'bitbake meta-ide-support'?"
44 else
45 echo "This shouldn't happen - something is missing from your toolchain installation"
46 fi
47 exit 1 43 exit 1
48fi 44fi
49 45
@@ -74,26 +70,11 @@ MOUNTD_PORT=${MOUNTD_PORT:=$[ 3048 + 2 * $NFS_INSTANCE ]}
74 70
75## For debugging you would additionally add 71## For debugging you would additionally add
76## --debug all 72## --debug all
77UNFSD_OPTS="-p -N -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT" 73UNFSD_OPTS="-p -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT"
78 74
79# See how we were called. 75# See how we were called.
80case "$1" in 76case "$1" in
81 start) 77 start)
82 PORTMAP_RUNNING=`ps -ef | grep portmap | grep -v grep`
83 RPCBIND_RUNNING=`ps -ef | grep rpcbind | grep -v grep`
84 if [[ "x$PORTMAP_RUNNING" = "x" && "x$RPCBIND_RUNNING" = "x" ]]; then
85 echo "======================================================="
86 echo "Error: neither rpcbind nor portmap appear to be running"
87 echo "Please install and start one of these services first"
88 echo "======================================================="
89 echo "Tip: for recent Ubuntu hosts, run:"
90 echo " sudo apt-get install rpcbind"
91 echo "Then add OPTIONS=\"-i -w\" to /etc/default/rpcbind and run"
92 echo " sudo service portmap restart"
93
94 exit 1
95 fi
96
97 echo "Creating exports file..." 78 echo "Creating exports file..."
98 echo "$NFS_EXPORT_DIR (rw,no_root_squash,no_all_squash,insecure)" > $EXPORTS 79 echo "$NFS_EXPORT_DIR (rw,no_root_squash,no_all_squash,insecure)" > $EXPORTS
99 80
diff --git a/scripts/runqemu-extract-sdk b/scripts/runqemu-extract-sdk
index 9bc0c07fb8..db05da25f2 100755
--- a/scripts/runqemu-extract-sdk
+++ b/scripts/runqemu-extract-sdk
@@ -25,7 +25,7 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then
25 echo "Did you forget to source your build system environment setup script?" 25 echo "Did you forget to source your build system environment setup script?"
26 exit 1 26 exit 1
27fi 27fi
28. $SYSROOT_SETUP_SCRIPT meta-ide-support 28. $SYSROOT_SETUP_SCRIPT qemu-helper-native
29PSEUDO_OPTS="-P $OECORE_NATIVE_SYSROOT/usr" 29PSEUDO_OPTS="-P $OECORE_NATIVE_SYSROOT/usr"
30 30
31ROOTFS_TARBALL=$1 31ROOTFS_TARBALL=$1
diff --git a/scripts/runqemu-gen-tapdevs b/scripts/runqemu-gen-tapdevs
index a6ee4517da..a00c79c442 100755
--- a/scripts/runqemu-gen-tapdevs
+++ b/scripts/runqemu-gen-tapdevs
@@ -1,53 +1,58 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Create a "bank" of tap network devices that can be used by the 3# Create a "bank" of tap network devices that can be used by the
4# runqemu script. This script needs to be run as root, and will 4# runqemu script. This script needs to be run as root
5# use the tunctl binary from the build system sysroot. Note: many Linux
6# distros these days still use an older version of tunctl which does not
7# support the group permissions option, hence the need to use the build
8# system provided version.
9# 5#
10# Copyright (C) 2010 Intel Corp. 6# Copyright (C) 2010 Intel Corp.
11# 7#
12# SPDX-License-Identifier: GPL-2.0-only 8# SPDX-License-Identifier: GPL-2.0-only
13# 9#
14 10
15uid=`id -u`
16gid=`id -g` 11gid=`id -g`
17if [ -n "$SUDO_UID" ]; then
18 uid=$SUDO_UID
19fi
20if [ -n "$SUDO_GID" ]; then 12if [ -n "$SUDO_GID" ]; then
21 gid=$SUDO_GID 13 gid=$SUDO_GID
22fi 14fi
23 15
24usage() { 16usage() {
25 echo "Usage: sudo $0 <uid> <gid> <num> <staging_bindir_native>" 17 echo "Usage: sudo $0 <gid> <num>"
26 echo "Where <uid> is the numeric user id the tap devices will be owned by"
27 echo "Where <gid> is the numeric group id the tap devices will be owned by" 18 echo "Where <gid> is the numeric group id the tap devices will be owned by"
28 echo "<num> is the number of tap devices to create (0 to remove all)" 19 echo "<num> is the number of tap devices to create (0 to remove all)"
29 echo "<native-sysroot-basedir> is the path to the build system's native sysroot"
30 echo "For example:" 20 echo "For example:"
31 echo "$ bitbake qemu-helper-native" 21 echo "$ bitbake qemu-helper-native"
32 echo "$ sudo $0 $uid $gid 4 tmp/sysroots-components/x86_64/qemu-helper-native/usr/bin" 22 echo "$ sudo $0 $gid 4"
33 echo "" 23 echo ""
34 exit 1 24 exit 1
35} 25}
36 26
37if [ $# -ne 4 ]; then 27# Allow passing 4 arguments for backward compatibility with warning
28if [ $# -gt 4 ]; then
29 echo "Error: Incorrect number of arguments"
30 usage
31fi
32if [ $# -gt 3 ]; then
33 echo "Warning: Ignoring the <native-sysroot-basedir> parameter. It is no longer needed."
34fi
35if [ $# -gt 2 ]; then
36 echo "Warning: Ignoring the <uid> parameter. It is no longer needed."
37 GID=$2
38 COUNT=$3
39elif [ $# -eq 2 ]; then
40 GID=$1
41 COUNT=$2
42else
38 echo "Error: Incorrect number of arguments" 43 echo "Error: Incorrect number of arguments"
39 usage 44 usage
40fi 45fi
41 46
42TUID=$1
43GID=$2
44COUNT=$3
45STAGING_BINDIR_NATIVE=$4
46 47
47TUNCTL=$STAGING_BINDIR_NATIVE/tunctl 48if [ -z "$OE_TAP_NAME" ]; then
48if [[ ! -x "$TUNCTL" || -d "$TUNCTL" ]]; then 49 OE_TAP_NAME=tap
49 echo "Error: $TUNCTL is not an executable" 50fi
50 usage 51
52# check if COUNT is a number and >= 0
53if ! [ $COUNT -ge 0 ]; then
54 echo "Error: Incorrect count: $COUNT"
55 exit 1
51fi 56fi
52 57
53if [ $EUID -ne 0 ]; then 58if [ $EUID -ne 0 ]; then
@@ -62,48 +67,41 @@ if [ ! -x "$RUNQEMU_IFUP" ]; then
62 exit 1 67 exit 1
63fi 68fi
64 69
65IFCONFIG=`which ip 2> /dev/null` 70if interfaces=`ip tuntap list` 2>/dev/null; then
66if [ -z "$IFCONFIG" ]; then 71 interfaces=`echo "$interfaces" |cut -f1 -d: |grep -E "^$OE_TAP_NAME.*"`
67 # Is it ever anywhere else?
68 IFCONFIG=/sbin/ip
69fi
70if [ ! -x "$IFCONFIG" ]; then
71 echo "$IFCONFIG cannot be executed"
72 exit 1
73fi
74
75if [ $COUNT -ge 0 ]; then
76 # Ensure we start with a clean slate
77 for tap in `$IFCONFIG link | grep tap | awk '{ print \$2 }' | sed s/://`; do
78 echo "Note: Destroying pre-existing tap interface $tap..."
79 $TUNCTL -d $tap
80 done
81 rm -f /etc/runqemu-nosudo
82else 72else
83 echo "Error: Incorrect count: $COUNT" 73 echo "Failed to call 'ip tuntap list'" >&2
84 exit 1 74 exit 1
85fi 75fi
86 76
87if [ $COUNT -gt 0 ]; then 77# Ensure we start with a clean slate
88 echo "Creating $COUNT tap devices for UID: $TUID GID: $GID..." 78for tap in $interfaces; do
89 for ((index=0; index < $COUNT; index++)); do 79 echo "Note: Destroying pre-existing tap interface $tap..."
90 echo "Creating tap$index" 80 ip tuntap del $tap mode tap
91 ifup=`$RUNQEMU_IFUP $TUID $GID $STAGING_BINDIR_NATIVE 2>&1` 81done
92 if [ $? -ne 0 ]; then 82rm -f /etc/runqemu-nosudo
93 echo "Error running tunctl: $ifup"
94 exit 1
95 fi
96 done
97 83
98 echo "Note: For systems running NetworkManager, it's recommended" 84if [ $COUNT -eq 0 ]; then
99 echo "Note: that the tap devices be set as unmanaged in the" 85 exit 0
100 echo "Note: NetworkManager.conf file. Add the following lines to"
101 echo "Note: /etc/NetworkManager/NetworkManager.conf"
102 echo "[keyfile]"
103 echo "unmanaged-devices=interface-name:tap*"
104
105 # The runqemu script will check for this file, and if it exists,
106 # will use the existing bank of tap devices without creating
107 # additional ones via sudo.
108 touch /etc/runqemu-nosudo
109fi 86fi
87
88echo "Creating $COUNT tap devices for GID: $GID..."
89for ((index=0; index < $COUNT; index++)); do
90 echo "Creating $OE_TAP_NAME$index"
91 if ! ifup=`$RUNQEMU_IFUP $GID 2>&1`; then
92 echo "Error bringing up interface: $ifup"
93 exit 1
94 fi
95done
96
97echo "Note: For systems running NetworkManager, it's recommended"
98echo "Note: that the tap devices be set as unmanaged in the"
99echo "Note: NetworkManager.conf file. Add the following lines to"
100echo "Note: /etc/NetworkManager/NetworkManager.conf"
101echo "[keyfile]"
102echo "unmanaged-devices=interface-name:$OE_TAP_NAME*"
103
104# The runqemu script will check for this file, and if it exists,
105# will use the existing bank of tap devices without creating
106# additional ones via sudo.
107touch /etc/runqemu-nosudo
diff --git a/scripts/runqemu-ifdown b/scripts/runqemu-ifdown
index a104c37bf8..822a2a39b9 100755
--- a/scripts/runqemu-ifdown
+++ b/scripts/runqemu-ifdown
@@ -1,8 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# QEMU network configuration script to bring down tap devices. This 3# QEMU network configuration script to bring down tap devices. This
4# utility needs to be run as root, and will use the tunctl binary 4# utility needs to be run as root, and will use the ip utility
5# from the native sysroot.
6# 5#
7# If you find yourself calling this script a lot, you can add the 6# If you find yourself calling this script a lot, you can add the
8# the following to your /etc/sudoers file to be able to run this 7# the following to your /etc/sudoers file to be able to run this
@@ -17,7 +16,7 @@
17# 16#
18 17
19usage() { 18usage() {
20 echo "sudo $(basename $0) <tap-dev> <native-sysroot-basedir>" 19 echo "sudo $(basename $0) <tap-dev>"
21} 20}
22 21
23if [ $EUID -ne 0 ]; then 22if [ $EUID -ne 0 ]; then
@@ -25,30 +24,31 @@ if [ $EUID -ne 0 ]; then
25 exit 1 24 exit 1
26fi 25fi
27 26
28if [ $# -ne 2 ]; then 27if [ $# -gt 2 ] || [ $# -lt 1 ]; then
29 usage 28 usage
30 exit 1 29 exit 1
31fi 30fi
32 31
32# backward compatibility
33if [ $# -eq 2 ] ; then
34 echo "Warning: native-sysroot-basedir parameter is ignored. It is no longer needed." >&2
35fi
36
33TAP=$1 37TAP=$1
34STAGING_BINDIR_NATIVE=$2
35 38
36TUNCTL=$STAGING_BINDIR_NATIVE/tunctl 39if ! ip tuntap del $TAP mode tap 2>/dev/null; then
37if [ ! -e "$TUNCTL" ]; then 40 echo "Error: Unable to run 'ip tuntap del'"
38 echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native"
39 exit 1 41 exit 1
40fi 42fi
41 43
42$TUNCTL -d $TAP 44IPTOOL=`which ip 2> /dev/null`
43 45if [ "x$IPTOOL" = "x" ]; then
44IFCONFIG=`which ip 2> /dev/null`
45if [ "x$IFCONFIG" = "x" ]; then
46 # better than nothing... 46 # better than nothing...
47 IFCONFIG=/sbin/ip 47 IPTOOL=/sbin/ip
48fi 48fi
49if [ -x "$IFCONFIG" ]; then 49if [ -x "$IPTOOL" ]; then
50 if `$IFCONFIG link show $TAP > /dev/null 2>&1`; then 50 if `$IPTOOL link show $TAP > /dev/null 2>&1`; then
51 $IFCONFIG link del $TAP 51 $IPTOOL link del $TAP
52 fi 52 fi
53fi 53fi
54# cleanup the remaining iptables rules 54# cleanup the remaining iptables rules
@@ -60,7 +60,13 @@ if [ ! -x "$IPTABLES" ]; then
60 echo "$IPTABLES cannot be executed" 60 echo "$IPTABLES cannot be executed"
61 exit 1 61 exit 1
62fi 62fi
63n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ] 63
64dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ] 64if [ -z "$OE_TAP_NAME" ]; then
65 OE_TAP_NAME=tap
66fi
67
68n=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 1 ]
69dest=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 2 ]
65$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32 70$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32
66$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32 71$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32
72true
diff --git a/scripts/runqemu-ifup b/scripts/runqemu-ifup
index bb661740c5..05c9325b6b 100755
--- a/scripts/runqemu-ifup
+++ b/scripts/runqemu-ifup
@@ -1,10 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# QEMU network interface configuration script. This utility needs to 3# QEMU network interface configuration script. This utility needs to
4# be run as root, and will use the tunctl binary from a native sysroot. 4# be run as root, and will use the ip utility
5# Note: many Linux distros these days still use an older version of
6# tunctl which does not support the group permissions option, hence
7# the need to use build system's version.
8# 5#
9# If you find yourself calling this script a lot, you can add 6# If you find yourself calling this script a lot, you can add
10# the following to your /etc/sudoers file to be able to run this 7# the following to your /etc/sudoers file to be able to run this
@@ -24,7 +21,7 @@
24# 21#
25 22
26usage() { 23usage() {
27 echo "sudo $(basename $0) <uid> <gid> <native-sysroot-basedir>" 24 echo "sudo $(basename $0) <gid>"
28} 25}
29 26
30if [ $EUID -ne 0 ]; then 27if [ $EUID -ne 0 ]; then
@@ -32,41 +29,43 @@ if [ $EUID -ne 0 ]; then
32 exit 1 29 exit 1
33fi 30fi
34 31
35if [ $# -ne 3 ]; then 32if [ $# -eq 2 ]; then
33 echo "Warning: uid parameter is ignored. It is no longer needed." >&2
34 GROUP="$2"
35elif [ $# -eq 1 ]; then
36 GROUP="$1"
37else
36 usage 38 usage
37 exit 1 39 exit 1
38fi 40fi
39 41
40USERID="-u $1"
41GROUP="-g $2"
42STAGING_BINDIR_NATIVE=$3
43 42
44TUNCTL=$STAGING_BINDIR_NATIVE/tunctl 43if [ -z "$OE_TAP_NAME" ]; then
45if [ ! -x "$TUNCTL" ]; then 44 OE_TAP_NAME=tap
46 echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native"
47 exit 1
48fi 45fi
49 46
50TAP=`$TUNCTL -b $GROUP 2>&1` 47if taps=$(ip tuntap list 2>/dev/null); then
51STATUS=$? 48 tap_no_last=$(echo "$taps" |cut -f 1 -d ":" |grep -E "^$OE_TAP_NAME.*" |sed "s/$OE_TAP_NAME//g" | sort -rn | head -n 1)
52if [ $STATUS -ne 0 ]; then 49 if [ -z "$tap_no_last" ]; then
53# If tunctl -g fails, try using tunctl -u, for older host kernels 50 tap_no=0
54# which do not support the TUNSETGROUP ioctl 51 else
55 TAP=`$TUNCTL -b $USERID 2>&1` 52 tap_no=$(("$tap_no_last" + 1))
56 STATUS=$?
57 if [ $STATUS -ne 0 ]; then
58 echo "tunctl failed:"
59 exit 1
60 fi 53 fi
54 ip tuntap add "$OE_TAP_NAME$tap_no" mode tap group "$GROUP" && TAP=$OE_TAP_NAME$tap_no
55fi
56
57if [ -z "$TAP" ]; then
58 echo "Error: Unable to find a tap device to use"
59 exit 1
61fi 60fi
62 61
63IFCONFIG=`which ip 2> /dev/null` 62IPTOOL=`which ip 2> /dev/null`
64if [ "x$IFCONFIG" = "x" ]; then 63if [ "x$IPTOOL" = "x" ]; then
65 # better than nothing... 64 # better than nothing...
66 IFCONFIG=/sbin/ip 65 IPTOOL=/sbin/ip
67fi 66fi
68if [ ! -x "$IFCONFIG" ]; then 67if [ ! -x "$IPTOOL" ]; then
69 echo "$IFCONFIG cannot be executed" 68 echo "$IPTOOL cannot be executed"
70 exit 1 69 exit 1
71fi 70fi
72 71
@@ -79,22 +78,22 @@ if [ ! -x "$IPTABLES" ]; then
79 exit 1 78 exit 1
80fi 79fi
81 80
82n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ] 81n=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 1 ]
83$IFCONFIG addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP 82$IPTOOL addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP
84STATUS=$? 83STATUS=$?
85if [ $STATUS -ne 0 ]; then 84if [ $STATUS -ne 0 ]; then
86 echo "Failed to set up IP addressing on $TAP" 85 echo "Failed to set up IP addressing on $TAP"
87 exit 1 86 exit 1
88fi 87fi
89$IFCONFIG link set dev $TAP up 88$IPTOOL link set dev $TAP up
90STATUS=$? 89STATUS=$?
91if [ $STATUS -ne 0 ]; then 90if [ $STATUS -ne 0 ]; then
92 echo "Failed to bring up $TAP" 91 echo "Failed to bring up $TAP"
93 exit 1 92 exit 1
94fi 93fi
95 94
96dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ] 95dest=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 2 ]
97$IFCONFIG route add to 192.168.7.$dest dev $TAP 96$IPTOOL route add to 192.168.7.$dest dev $TAP
98STATUS=$? 97STATUS=$?
99if [ $STATUS -ne 0 ]; then 98if [ $STATUS -ne 0 ]; then
100 echo "Failed to add route to 192.168.7.$dest using $TAP" 99 echo "Failed to add route to 192.168.7.$dest using $TAP"
diff --git a/scripts/runqemu.README b/scripts/runqemu.README
index da9abd7dfb..e5f4b4634c 100644
--- a/scripts/runqemu.README
+++ b/scripts/runqemu.README
@@ -1,12 +1,12 @@
1Using OE images with QEMU 1Using OE images with QEMU
2========================= 2=========================
3 3
4OE-Core can generate qemu bootable kernels and images with can be used 4OE-Core can generate qemu bootable kernels and images which can be used
5on a desktop system. The scripts currently support booting ARM, MIPS, PowerPC 5on a desktop system. The scripts currently support booting ARM, MIPS, PowerPC
6and x86 (32 and 64 bit) images. The scripts can be used within the OE build 6and x86 (32 and 64 bit) images. The scripts can be used within the OE build
7system or externaly. 7system or externally.
8 8
9The runqemu script is run as: 9The runqemu script is run as:
10 10
11 runqemu <machine> <zimage> <filesystem> 11 runqemu <machine> <zimage> <filesystem>
12 12
@@ -15,13 +15,13 @@ where:
15 <machine> is the machine/architecture to use (qemuarm/qemumips/qemuppc/qemux86/qemux86-64) 15 <machine> is the machine/architecture to use (qemuarm/qemumips/qemuppc/qemux86/qemux86-64)
16 <zimage> is the path to a kernel (e.g. zimage-qemuarm.bin) 16 <zimage> is the path to a kernel (e.g. zimage-qemuarm.bin)
17 <filesystem> is the path to an ext2 image (e.g. filesystem-qemuarm.ext2) or an nfs directory 17 <filesystem> is the path to an ext2 image (e.g. filesystem-qemuarm.ext2) or an nfs directory
18 18
19If <machine> isn't specified, the script will try to detect the machine name 19If <machine> isn't specified, the script will try to detect the machine name
20from the name of the <zimage> file. 20from the name of the <zimage> file.
21 21
22If <filesystem> isn't specified, nfs booting will be assumed. 22If <filesystem> isn't specified, nfs booting will be assumed.
23 23
24When used within the build system, it will default to qemuarm, ext2 and the last kernel and 24When used within the build system, it will default to qemuarm, ext2 and the last kernel and
25core-image-sato-sdk image built by the build system. If an sdk image isn't present it will look 25core-image-sato-sdk image built by the build system. If an sdk image isn't present it will look
26for sato and minimal images. 26for sato and minimal images.
27 27
@@ -31,7 +31,7 @@ Full usage instructions can be seen by running the command with no options speci
31Notes 31Notes
32===== 32=====
33 33
34 - The scripts run qemu using sudo. Change perms on /dev/net/tun to 34 - The scripts run qemu using sudo. Change perms on /dev/net/tun to
35 run as non root. The runqemu-gen-tapdevs script can also be used by 35 run as non root. The runqemu-gen-tapdevs script can also be used by
36 root to prepopulate the appropriate network devices. 36 root to prepopulate the appropriate network devices.
37 - You can access the host computer at 192.168.7.1 within the image. 37 - You can access the host computer at 192.168.7.1 within the image.
diff --git a/scripts/sstate-cache-management.py b/scripts/sstate-cache-management.py
new file mode 100755
index 0000000000..d3f600bd28
--- /dev/null
+++ b/scripts/sstate-cache-management.py
@@ -0,0 +1,329 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import argparse
9import os
10import re
11import sys
12
13from collections import defaultdict
14from concurrent.futures import ThreadPoolExecutor
15from dataclasses import dataclass
16from pathlib import Path
17
18if sys.version_info < (3, 8, 0):
19 raise RuntimeError("Sorry, python 3.8.0 or later is required for this script.")
20
21SSTATE_PREFIX = "sstate:"
22SSTATE_EXTENSION = ".tar.zst"
23# SSTATE_EXTENSION = ".tgz"
24# .siginfo.done files are mentioned in the original script?
25SSTATE_SUFFIXES = (
26 SSTATE_EXTENSION,
27 f"{SSTATE_EXTENSION}.siginfo",
28 f"{SSTATE_EXTENSION}.done",
29)
30
31RE_SSTATE_PKGSPEC = re.compile(
32 rf"""sstate:(?P<pn>[^:]*):
33 (?P<package_target>[^:]*):
34 (?P<pv>[^:]*):
35 (?P<pr>[^:]*):
36 (?P<sstate_pkgarch>[^:]*):
37 (?P<sstate_version>[^_]*):
38 (?P<bb_unihash>[^_]*)_
39 (?P<bb_task>[^:]*)
40 (?P<ext>({"|".join([re.escape(s) for s in SSTATE_SUFFIXES])}))$""",
41 re.X,
42)
43
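A quick check of what RE_SSTATE_PKGSPEC extracts from a well-formed cache entry; the filename below is invented but follows the sstate naming scheme with the default .tar.zst extension (bb_task backtracks off the extension, so underscores inside task names are handled):

    # Assumes RE_SSTATE_PKGSPEC as defined above.
    name = ("sstate:zlib:core2-64-poky-linux:1.3:r0:core2-64:10:"
            "0123456789abcdef_populate_sysroot.tar.zst")
    m = RE_SSTATE_PKGSPEC.match(name)
    assert m
    print(m.group("pn"), m.group("bb_task"), m.group("ext"))
    # -> zlib populate_sysroot .tar.zst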
44
45# Really we'd like something like a Path subclass which implements a stat
46# cache here, unfortunately there's no good way to do that transparently
47# (yet); see:
48#
49# https://github.com/python/cpython/issues/70219
50# https://discuss.python.org/t/make-pathlib-extensible/3428/77
51@dataclass
52class SstateEntry:
53 """Class for keeping track of an entry in sstate-cache."""
54
55 path: Path
56 match: re.Match
57 stat_result: os.stat_result = None
58
59 def __hash__(self):
60 return self.path.__hash__()
61
62 def __getattr__(self, name):
63 return self.match.group(name)
64
65
66# this is what's in the original script; as far as I can tell, it's an
67# implementation artefact which we don't need?
68def find_archs():
69 # all_archs
70 builder_arch = os.uname().machine
71
72 # FIXME
73 layer_paths = [Path("../..")]
74
75 tune_archs = set()
76 re_tune = re.compile(r'AVAILTUNES .*=.*"(.*)"')
77 for path in layer_paths:
78 for tunefile in [
79 p for p in path.glob("meta*/conf/machine/include/**/*") if p.is_file()
80 ]:
81 with open(tunefile) as f:
82 for line in f:
83 m = re_tune.match(line)
84 if m:
85 tune_archs.update(m.group(1).split())
86
87 # all_machines
88 machine_archs = set()
89 for path in layer_paths:
90 for machine_file in path.glob("meta*/conf/machine/*.conf"):
91 machine_archs.add(machine_file.parts[-1][:-5])
92
93 extra_archs = set()
94 all_archs = (
95 set(
96 arch.replace("-", "_")
97 for arch in machine_archs | tune_archs | set(["allarch", builder_arch])
98 )
99 | extra_archs
100 )
101
102 print(all_archs)
103
104
105# again, not needed? (dead code: "paths" is not defined in this scope)
106def find_tasks():
107 print(set([p.bb_task for p in paths]))
108
109
110def collect_sstate_paths(args):
111 def scandir(path, paths):
112 # Assume everything is a directory; by not checking we avoid needing an
113 # additional stat which is potentially a synchronous roundtrip over NFS
114 try:
115 for p in path.iterdir():
116 filename = p.parts[-1]
117 if filename.startswith(SSTATE_PREFIX):
118 if filename.endswith(SSTATE_SUFFIXES):
119 m = RE_SSTATE_PKGSPEC.match(p.parts[-1])
120 assert m
121 paths.add(SstateEntry(p, m))
122 # ignore other things (includes things like lockfiles)
123 else:
124 scandir(p, paths)
125
126 except NotADirectoryError:
127 pass
128
129 paths = set()
130 # TODO: parallelise scandir
131 scandir(Path(args.cache_dir), paths)
132
133 def path_stat(p):
134 p.stat_result = p.path.lstat()
135
136 if args.remove_duplicated:
137 # This probably costs a little performance on a local filesystem,
138 # where we contend with the GIL; over NFS it's a massive win.
139 with ThreadPoolExecutor(max_workers=args.jobs) as executor:
140 executor.map(path_stat, paths)
141
142 return paths
143
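Note that str.endswith() accepts a tuple, which is what lets the single check inside scandir() above cover all three suffixes; for example (filenames invented):

    assert "foo.tar.zst.siginfo".endswith(SSTATE_SUFFIXES)
    assert not "foo.tar.zst.lock".endswith(SSTATE_SUFFIXES)   # e.g. a lockfile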
144
145def remove_by_stamps(args, paths):
146 all_sums = set()
147 for stamps_dir in args.stamps_dir:
148 stamps_path = Path(stamps_dir)
149 assert stamps_path.is_dir()
150 re_sigdata = re.compile(r"do_.*\.sigdata\.([^.]*)")
151 all_sums |= set(
152 [
153 re_sigdata.search(x.parts[-1]).group(1)
154 for x in stamps_path.glob("*/*/*.do_*.sigdata.*")
155 ]
156 )
157 re_setscene = re.compile(r"do_.*_setscene\.([^.]*)")
158 all_sums |= set(
159 [
160 re_setscene.search(x.parts[-1]).group(1)
161 for x in stamps_path.glob("*/*/*.do_*_setscene.*")
162 ]
163 )
164 return [p for p in paths if p.bb_unihash not in all_sums]
165
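As a sanity check on the stamp parsing above (the stamp filename is invented, but follows the <pn>.do_<task>.sigdata.<unihash> shape the glob expects):

    import re

    stamp = "zlib.do_configure.sigdata.0123abcd"
    m = re.compile(r"do_.*\.sigdata\.([^.]*)").search(stamp)
    assert m and m.group(1) == "0123abcd"   # the unihash used for matching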
166
167def remove_duplicated(args, paths):
168 # Skip populate_lic as it produces duplicates in a normal build
169 #
170 # 9ae16469e707 sstate-cache-management: skip populate_lic archives when removing duplicates
171 valid_paths = [p for p in paths if p.bb_task != "populate_lic"]
172
173 keep = dict()
174 remove = list()
175 for p in valid_paths:
176 sstate_sig = ":".join([p.pn, p.sstate_pkgarch, p.bb_task, p.ext])
177 if sstate_sig not in keep:
178 keep[sstate_sig] = p
179 elif p.stat_result.st_mtime > keep[sstate_sig].stat_result.st_mtime:
180 remove.append(keep[sstate_sig])
181 keep[sstate_sig] = p
182 else:
183 remove.append(p)
184
185 return remove
186
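A minimal sketch of the selection logic, using stand-in objects with invented values rather than real SstateEntry instances (args is unused by remove_duplicated, so None suffices):

    from types import SimpleNamespace as Stub

    def entry(mtime):
        return Stub(pn="zlib", sstate_pkgarch="core2-64",
                    bb_task="populate_sysroot", ext=".tar.zst",
                    stat_result=Stub(st_mtime=mtime))

    older, newer = entry(100), entry(200)
    assert remove_duplicated(None, [older, newer]) == [older]   # older copy is removed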
187
188def remove_orphans(args, paths):
189 remove = list()
190 pathsigs = defaultdict(list)
191 for p in paths:
192 sstate_sig = ":".join([p.pn, p.sstate_pkgarch, p.bb_task])
193 pathsigs[sstate_sig].append(p)
194 for k, v in pathsigs.items():
195 if len([p for p in v if p.ext == SSTATE_EXTENSION]) == 0:
196 remove.extend(v)
197 return remove
198
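Orphan detection groups entries by (pn, arch, task) and flags any group left with no archive; with the same invented stub shape as above:

    from types import SimpleNamespace as Stub

    lone_siginfo = Stub(pn="zlib", sstate_pkgarch="core2-64",
                        bb_task="populate_sysroot", ext=".tar.zst.siginfo")
    # no entry in the group has ext == SSTATE_EXTENSION, so the whole group goes
    assert remove_orphans(None, [lone_siginfo]) == [lone_siginfo]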
199
200def parse_arguments():
201 parser = argparse.ArgumentParser(description="sstate cache management utility.")
202
203 parser.add_argument(
204 "--cache-dir",
205 default=os.environ.get("SSTATE_CACHE_DIR"),
206 help="""Specify sstate cache directory, will use the environment
207 variable SSTATE_CACHE_DIR if it is not specified.""",
208 )
209
210 # parser.add_argument(
211 # "--extra-archs",
212 # help="""Specify list of architectures which should be tested, this list
213 # will be extended with native arch, allarch and empty arch. The
214 # script won't be trying to generate list of available archs from
215 # AVAILTUNES in tune files.""",
216 # )
217
218 # parser.add_argument(
219 # "--extra-layer",
220 # help="""Specify the layer which will be used for searching the archs,
221 # it will search the meta and meta-* layers in the top dir by
222 # default, and will search meta, meta-*, <layer1>, <layer2>,
223 # ...<layern> when specified. Use "," as the separator.
224 #
225 # This is useless for --stamps-dir or when --extra-archs is used.""",
226 # )
227
228 parser.add_argument(
229 "-d",
230 "--remove-duplicated",
231 action="store_true",
232        help="""Remove duplicated sstate cache files of a package; only
233        the newest one will be kept. Duplicates must have the same
234        arch, which means sstate cache files with different archs
235        are not considered duplicates.
236
237 Conflicts with --stamps-dir.""",
238 )
239
240 parser.add_argument(
241 "--remove-orphans",
242 action="store_true",
243 help=f"""Remove orphan siginfo files from the sstate cache, i.e. those
244        where there is no {SSTATE_EXTENSION} file but there are associated
245 tracking files.""",
246 )
247
248 parser.add_argument(
249 "--stamps-dir",
250 action="append",
251        help="""Specify the build directory's stamps directories; the sstate
252        cache files which ARE USED by these build directories will be KEPT,
253        and other sstate cache files in cache-dir will be removed. Can be
254 specified multiple times for several directories.
255
256 Conflicts with --remove-duplicated.""",
257 )
258
259 parser.add_argument(
260 "-j", "--jobs", default=8, type=int, help="Run JOBS jobs in parallel."
261 )
262
263 # parser.add_argument(
264 # "-L",
265 # "--follow-symlink",
266 # action="store_true",
267    #     help="Remove both the symbolic link and the destination file, default: no.",
268 # )
269
270 parser.add_argument(
271 "-y",
272 "--yes",
273 action="store_true",
274 help="""Automatic yes to prompts; assume "yes" as answer to all prompts
275 and run non-interactively.""",
276 )
277
278 parser.add_argument(
279 "-v", "--verbose", action="store_true", help="Explain what is being done."
280 )
281
282 parser.add_argument(
283 "-D",
284 "--debug",
285 action="count",
286 default=0,
287 help="Show debug info, repeat for more debug info.",
288 )
289
290 args = parser.parse_args()
291 if args.cache_dir is None or (
292 not args.remove_duplicated and not args.stamps_dir and not args.remove_orphans
293 ):
294 parser.print_usage()
295 sys.exit(1)
296
297 return args
298
299
300def main():
301 args = parse_arguments()
302
303 paths = collect_sstate_paths(args)
304 if args.remove_duplicated:
305 remove = remove_duplicated(args, paths)
306 elif args.stamps_dir:
307 remove = remove_by_stamps(args, paths)
308 else:
309 remove = list()
310
311 if args.remove_orphans:
312 remove = set(remove) | set(remove_orphans(args, paths))
313
314 if args.debug >= 1:
315 print("\n".join([str(p.path) for p in remove]))
316 print(f"{len(remove)} out of {len(paths)} files will be removed!")
317 if not args.yes:
318 print("Do you want to continue (y/n)?")
319 confirm = input() in ("y", "Y")
320 else:
321 confirm = True
322 if confirm:
323 # TODO: parallelise remove
324 for p in remove:
325 p.path.unlink()
326
327
328if __name__ == "__main__":
329 main()
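Typical invocations of the new script (the paths are examples only): prune duplicates non-interactively with

    ./scripts/sstate-cache-management.py --cache-dir=/srv/sstate-cache --remove-duplicated --yes

or keep only what two builds still reference; note that --stamps-dir is now given once per directory rather than comma-separated as in the shell version below:

    ./scripts/sstate-cache-management.py --cache-dir=/srv/sstate-cache \
        --stamps-dir=build1/tmp/stamps --stamps-dir=build2/tmp/stamps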
diff --git a/scripts/sstate-cache-management.sh b/scripts/sstate-cache-management.sh
deleted file mode 100755
index f1706a2229..0000000000
--- a/scripts/sstate-cache-management.sh
+++ /dev/null
@@ -1,458 +0,0 @@
1#!/bin/bash
2
3# Copyright (c) 2012 Wind River Systems, Inc.
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8# Global vars
9cache_dir=
10confirm=
11fsym=
12total_deleted=0
13verbose=
14debug=0
15
16usage () {
17 cat << EOF
18Welcome to sstate cache management utilities.
19sstate-cache-management.sh <OPTION>
20
21Options:
22 -h, --help
23 Display this help and exit.
24
25 --cache-dir=<sstate cache dir>
26 Specify sstate cache directory, will use the environment
27 variable SSTATE_CACHE_DIR if it is not specified.
28
29 --extra-archs=<arch1>,<arch2>...<archn>
30 Specify list of architectures which should be tested, this list
31 will be extended with native arch, allarch and empty arch. The
32 script won't be trying to generate list of available archs from
33 AVAILTUNES in tune files.
34
35 --extra-layer=<layer1>,<layer2>...<layern>
36 Specify the layer which will be used for searching the archs,
37 it will search the meta and meta-* layers in the top dir by
38 default, and will search meta, meta-*, <layer1>, <layer2>,
39 ...<layern> when specified. Use "," as the separator.
40
41 This is useless for --stamps-dir or when --extra-archs is used.
42
43 -d, --remove-duplicated
44 Remove the duplicated sstate cache files of one package, only
45 the newest one will be kept. The duplicated sstate cache files
46 of one package must have the same arch, which means sstate cache
47 files with multiple archs are not considered duplicate.
48
49 Conflicts with --stamps-dir.
50
51 --stamps-dir=<dir1>,<dir2>...<dirn>
52 Specify the build directory's stamps directories, the sstate
53 cache file which IS USED by these build directories will be KEPT,
54 other sstate cache files in cache-dir will be removed. Use ","
55 as the separator. For example:
56 --stamps-dir=build1/tmp/stamps,build2/tmp/stamps
57
58 Conflicts with --remove-duplicated.
59
60 -L, --follow-symlink
61 Remove both the symbolic link and the destination file, default: no.
62
63 -y, --yes
64 Automatic yes to prompts; assume "yes" as answer to all prompts
65 and run non-interactively.
66
67 -v, --verbose
68 Explain what is being done.
69
70 -D, --debug
71 Show debug info, repeat for more debug info.
72
73EOF
74}
75
76if [ $# -lt 1 ]; then
77 usage
78 exit 0
79fi
80
81# Echo no files to remove
82no_files () {
83 echo No files to remove
84}
85
86# Echo nothing to do
87do_nothing () {
88 echo Nothing to do
89}
90
91# Read the input "y"
92read_confirm () {
93 echo "$total_deleted out of $total_files files will be removed! "
94 if [ "$confirm" != "y" ]; then
95 echo "Do you want to continue (y/n)? "
96 while read confirm; do
97 [ "$confirm" = "Y" -o "$confirm" = "y" -o "$confirm" = "n" \
98 -o "$confirm" = "N" ] && break
99 echo "Invalid input \"$confirm\", please input 'y' or 'n': "
100 done
101 else
102 echo
103 fi
104}
105
106# Print error information and exit.
107echo_error () {
108 echo "ERROR: $1" >&2
109 exit 1
110}
111
112# Generate the remove list:
113#
114# * Add .done/.siginfo to the remove list
115# * Add destination of symlink to the remove list
116#
117# $1: output file, others: sstate cache file (.tgz)
118gen_rmlist (){
119 local rmlist_file="$1"
120 shift
121 local files="$@"
122 for i in $files; do
123 echo $i >> $rmlist_file
124 # Add the ".siginfo"
125 if [ -e $i.siginfo ]; then
126 echo $i.siginfo >> $rmlist_file
127 fi
128 # Add the destination of symlink
129 if [ -L "$i" ]; then
130 if [ "$fsym" = "y" ]; then
131 dest="`readlink -e $i`"
132 if [ -n "$dest" ]; then
133 echo $dest >> $rmlist_file
134 # Remove the .siginfo when .tgz is removed
135 if [ -f "$dest.siginfo" ]; then
136 echo $dest.siginfo >> $rmlist_file
137 fi
138 fi
139 fi
140 # Add the ".tgz.done" and ".siginfo.done" (may exist in the future)
141 base_fn="${i##/*/}"
142 t_fn="$base_fn.done"
143 s_fn="$base_fn.siginfo.done"
144 for d in $t_fn $s_fn; do
145 if [ -f $cache_dir/$d ]; then
146 echo $cache_dir/$d >> $rmlist_file
147 fi
148 done
149 fi
150 done
151}
152
153# Remove the duplicated cache files for the pkg, keep the newest one
154remove_duplicated () {
155
156 local topdir
157 local oe_core_dir
158 local tunedirs
159 local all_archs
160 local all_machines
161 local ava_archs
162 local arch
163 local file_names
164 local sstate_files_list
165 local fn_tmp
166 local list_suffix=`mktemp` || exit 1
167
168 if [ -z "$extra_archs" ] ; then
169 # Find out the archs in all the layers
170 echo "Figuring out the archs in the layers ... "
171 oe_core_dir=$(dirname $(dirname $(readlink -e $0)))
172 topdir=$(dirname $oe_core_dir)
173 tunedirs="`find $topdir/meta* ${oe_core_dir}/meta* $layers -path '*/meta*/conf/machine/include' 2>/dev/null`"
174 [ -n "$tunedirs" ] || echo_error "Can't find the tune directory"
175 all_machines="`find $topdir/meta* ${oe_core_dir}/meta* $layers -path '*/meta*/conf/machine/*' -name '*.conf' 2>/dev/null | sed -e 's/.*\///' -e 's/.conf$//'`"
176 all_archs=`grep -r -h "^AVAILTUNES .*=" $tunedirs | sed -e 's/.*=//' -e 's/\"//g'`
177 fi
178
179 # Use the "_" to substitute "-", e.g., x86-64 to x86_64, but not for extra_archs which can be something like cortexa9t2-vfp-neon
180 # Sort to remove the duplicated ones
181 # Add allarch and builder arch (native)
182 builder_arch=$(uname -m)
183 all_archs="$(echo allarch $all_archs $all_machines $builder_arch \
184 | sed -e 's/-/_/g' -e 's/ /\n/g' | sort -u) $extra_archs"
185 echo "Done"
186
187 # Total number of files including sstate-, .siginfo and .done files
188 total_files=`find $cache_dir -name 'sstate*' | wc -l`
189 # Save all the sstate files in a file
190 sstate_files_list=`mktemp` || exit 1
191 find $cache_dir -name 'sstate:*:*:*:*:*:*:*.tgz*' >$sstate_files_list
192
193 echo "Figuring out the suffixes in the sstate cache dir ... "
194 sstate_suffixes="`sed 's%.*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^_]*_\([^:]*\)\.tgz.*%\1%g' $sstate_files_list | sort -u`"
195 echo "Done"
196 echo "The following suffixes have been found in the cache dir:"
197 echo $sstate_suffixes
198
199 echo "Figuring out the archs in the sstate cache dir ... "
200 # Using this SSTATE_PKGSPEC definition it's 6th colon separated field
201 # SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
202 for arch in $all_archs; do
203 grep -q ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.tgz$" $sstate_files_list
204 [ $? -eq 0 ] && ava_archs="$ava_archs $arch"
205 # ${builder_arch}_$arch used by toolchain sstate
206 grep -q ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:${builder_arch}_$arch:[^:]*:[^:]*\.tgz$" $sstate_files_list
207 [ $? -eq 0 ] && ava_archs="$ava_archs ${builder_arch}_$arch"
208 done
209 echo "Done"
210 echo "The following archs have been found in the cache dir:"
211 echo $ava_archs
212 echo ""
213
214 # Save the file list which needs to be removed
215 local remove_listdir=`mktemp -d` || exit 1
216 for suffix in $sstate_suffixes; do
217 if [ "$suffix" = "populate_lic" ] ; then
218 echo "Skipping populate_lic, because removing duplicates doesn't work correctly for them (use --stamps-dir instead)"
219 continue
220 fi
221 # Total number of files including .siginfo and .done files
222 total_files_suffix=`grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tgz.*" $sstate_files_list | wc -l 2>/dev/null`
223 total_tgz_suffix=`grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tgz$" $sstate_files_list | wc -l 2>/dev/null`
224 # Save the file list to a file, some suffix's file may not exist
225 grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tgz.*" $sstate_files_list >$list_suffix 2>/dev/null
226 local deleted_tgz=0
227 local deleted_files=0
228 for ext in tgz tgz.siginfo tgz.done; do
229 echo "Figuring out the sstate:xxx_$suffix.$ext ... "
230 # Uniq BPNs
231 file_names=`for arch in $ava_archs ""; do
232 sed -ne "s%.*/sstate:\([^:]*\):[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.${ext}$%\1%p" $list_suffix
233 done | sort -u`
234
235 fn_tmp=`mktemp` || exit 1
236 rm_list="$remove_listdir/sstate:xxx_$suffix"
237 for fn in $file_names; do
238 [ -z "$verbose" ] || echo "Analyzing sstate:$fn-xxx_$suffix.${ext}"
239 for arch in $ava_archs ""; do
240 grep -h ".*/sstate:$fn:[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.${ext}$" $list_suffix >$fn_tmp
241 if [ -s $fn_tmp ] ; then
242 [ $debug -gt 1 ] && echo "Available files for $fn-$arch- with suffix $suffix.${ext}:" && cat $fn_tmp
243 # Use the modification time
244 to_del=$(ls -t $(cat $fn_tmp) | sed -n '1!p')
245 [ $debug -gt 2 ] && echo "Considering to delete: $to_del"
246 # The sstate file which is downloaded from the SSTATE_MIRROR is
247 # put in SSTATE_DIR, and there is a symlink in SSTATE_DIR/??/ to
248 # it, so filter it out from the remove list if it should not be
249 # removed.
250 to_keep=$(ls -t $(cat $fn_tmp) | sed -n '1p')
251 [ $debug -gt 2 ] && echo "Considering to keep: $to_keep"
252 for k in $to_keep; do
253 if [ -L "$k" ]; then
254 # The symlink's destination
255 k_dest="`readlink -e $k`"
256 # Maybe it is the one in cache_dir
257 k_maybe="$cache_dir/${k##/*/}"
258 # Remove it from the remove list if they are the same.
259 if [ "$k_dest" = "$k_maybe" ]; then
260 to_del="`echo $to_del | sed 's#'\"$k_maybe\"'##g'`"
261 fi
262 fi
263 done
264 rm -f $fn_tmp
265 [ $debug -gt 2 ] && echo "Decided to delete: $to_del"
266 gen_rmlist $rm_list.$ext "$to_del"
267 fi
268 done
269 done
270 done
271 deleted_tgz=`cat $rm_list.* 2>/dev/null | grep ".tgz$" | wc -l`
272 deleted_files=`cat $rm_list.* 2>/dev/null | wc -l`
273 [ "$deleted_files" -gt 0 -a $debug -gt 0 ] && cat $rm_list.*
274 echo "($deleted_tgz out of $total_tgz_suffix .tgz files for $suffix suffix will be removed or $deleted_files out of $total_files_suffix when counting also .siginfo and .done files)"
275 let total_deleted=$total_deleted+$deleted_files
276 done
277 deleted_tgz=0
278 rm_old_list=$remove_listdir/sstate-old-filenames
279 find $cache_dir -name 'sstate-*.tgz' >$rm_old_list
280 [ -s "$rm_old_list" ] && deleted_tgz=`cat $rm_old_list | grep ".tgz$" | wc -l`
281 [ -s "$rm_old_list" ] && deleted_files=`cat $rm_old_list | wc -l`
282 [ -s "$rm_old_list" -a $debug -gt 0 ] && cat $rm_old_list
283 echo "($deleted_tgz .tgz files with old sstate-* filenames will be removed or $deleted_files when counting also .siginfo and .done files)"
284 let total_deleted=$total_deleted+$deleted_files
285
286 rm -f $list_suffix
287 rm -f $sstate_files_list
288 if [ $total_deleted -gt 0 ]; then
289 read_confirm
290 if [ "$confirm" = "y" -o "$confirm" = "Y" ]; then
291 for list in `ls $remove_listdir/`; do
292 echo "Removing $list.tgz (`cat $remove_listdir/$list | wc -w` files) ... "
293 # Remove them one by one to avoid the argument list too long error
294 for i in `cat $remove_listdir/$list`; do
295 rm -f $verbose $i
296 done
297 echo "Done"
298 done
299 echo "$total_deleted files have been removed!"
300 else
301 do_nothing
302 fi
303 else
304 no_files
305 fi
306 [ -d $remove_listdir ] && rm -fr $remove_listdir
307}
308
309# Remove the sstate file by stamps dir, the file not used by the stamps dir
310# will be removed.
311rm_by_stamps (){
312
313 local cache_list=`mktemp` || exit 1
314 local keep_list=`mktemp` || exit 1
315 local rm_list=`mktemp` || exit 1
316 local sums
317 local all_sums
318
319 # Total number of files including sstate-, .siginfo and .done files
320 total_files=`find $cache_dir -type f -name 'sstate*' | wc -l`
321 # Save all the state file list to a file
322 find $cache_dir -type f -name 'sstate*' | sort -u -o $cache_list
323
324 echo "Figuring out the suffixes in the sstate cache dir ... "
325 local sstate_suffixes="`sed 's%.*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^_]*_\([^:]*\)\.tgz.*%\1%g' $cache_list | sort -u`"
326 echo "Done"
327 echo "The following suffixes have been found in the cache dir:"
328 echo $sstate_suffixes
329
330 # Figure out all the md5sums in the stamps dir.
331 echo "Figuring out all the md5sums in stamps dir ... "
332 for i in $sstate_suffixes; do
333 # There is no "\.sigdata" but "_setscene" when it is mirrored
334 # from the SSTATE_MIRRORS, use them to figure out the sum.
335 sums=`find $stamps -maxdepth 3 -name "*.do_$i.*" \
336 -o -name "*.do_${i}_setscene.*" | \
337 sed -ne 's#.*_setscene\.##p' -e 's#.*\.sigdata\.##p' | \
338 sed -e 's#\..*##' | sort -u`
339 all_sums="$all_sums $sums"
340 done
341 echo "Done"
342
343 echo "Figuring out the files which will be removed ... "
344 for i in $all_sums; do
345 grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:${i}_.*" $cache_list >>$keep_list
346 done
347 echo "Done"
348
349 if [ -s $keep_list ]; then
350 sort -u $keep_list -o $keep_list
351 to_del=`comm -1 -3 $keep_list $cache_list`
352 gen_rmlist $rm_list "$to_del"
353 let total_deleted=`cat $rm_list | sort -u | wc -w`
354 if [ $total_deleted -gt 0 ]; then
355 [ $debug -gt 0 ] && cat $rm_list | sort -u
356 read_confirm
357 if [ "$confirm" = "y" -o "$confirm" = "Y" ]; then
358 echo "Removing sstate cache files ... ($total_deleted files)"
359 # Remove them one by one to avoid the argument list too long error
360 for i in `cat $rm_list | sort -u`; do
361 rm -f $verbose $i
362 done
363 echo "$total_deleted files have been removed"
364 else
365 do_nothing
366 fi
367 else
368 no_files
369 fi
370 else
371 echo_error "All files in cache dir will be removed! Abort!"
372 fi
373
374 rm -f $cache_list
375 rm -f $keep_list
376 rm -f $rm_list
377}
378
379# Parse arguments
380while [ -n "$1" ]; do
381 case $1 in
382 --cache-dir=*)
383 cache_dir=`echo $1 | sed -e 's#^--cache-dir=##' | xargs readlink -e`
384 [ -d "$cache_dir" ] || echo_error "Invalid argument to --cache-dir"
385 shift
386 ;;
387 --remove-duplicated|-d)
388 rm_duplicated="y"
389 shift
390 ;;
391 --yes|-y)
392 confirm="y"
393 shift
394 ;;
395 --follow-symlink|-L)
396 fsym="y"
397 shift
398 ;;
399 --extra-archs=*)
400 extra_archs=`echo $1 | sed -e 's#^--extra-archs=##' -e 's#,# #g'`
401 [ -n "$extra_archs" ] || echo_error "Invalid extra arch parameter"
402 shift
403 ;;
404 --extra-layer=*)
405 extra_layers=`echo $1 | sed -e 's#^--extra-layer=##' -e 's#,# #g'`
406 [ -n "$extra_layers" ] || echo_error "Invalid extra layer parameter"
407 for i in $extra_layers; do
408 l=`readlink -e $i`
409 if [ -d "$l" ]; then
410 layers="$layers $l"
411 else
412 echo_error "Can't find layer $i"
413 fi
414 done
415 shift
416 ;;
417 --stamps-dir=*)
418 stamps=`echo $1 | sed -e 's#^--stamps-dir=##' -e 's#,# #g'`
419 [ -n "$stamps" ] || echo_error "Invalid stamps dir $i"
420 for i in $stamps; do
421 [ -d "$i" ] || echo_error "Invalid stamps dir $i"
422 done
423 shift
424 ;;
425 --verbose|-v)
426 verbose="-v"
427 shift
428 ;;
429 --debug|-D)
430 debug=`expr $debug + 1`
431 echo "Debug level $debug"
432 shift
433 ;;
434 --help|-h)
435 usage
436 exit 0
437 ;;
438 *)
439 echo "Invalid arguments $*"
440 echo_error "Try 'sstate-cache-management.sh -h' for more information."
441 ;;
442 esac
443done
444
445# sstate cache directory, use environment variable SSTATE_CACHE_DIR
446# if it was not specified, otherwise, error.
447[ -n "$cache_dir" ] || cache_dir=$SSTATE_CACHE_DIR
448[ -n "$cache_dir" ] || echo_error "No cache dir found!"
449[ -d "$cache_dir" ] || echo_error "Invalid cache directory \"$cache_dir\""
450
451[ -n "$rm_duplicated" -a -n "$stamps" ] && \
452 echo_error "Can not use both --remove-duplicated and --stamps-dir"
453
454[ "$rm_duplicated" = "y" ] && remove_duplicated
455[ -n "$stamps" ] && rm_by_stamps
456[ -z "$rm_duplicated" -a -z "$stamps" ] && \
457 echo "What do you want to do?"
458exit 0
diff --git a/scripts/sstate-diff-machines.sh b/scripts/sstate-diff-machines.sh
index 1d721eb87d..5ed413b2ee 100755
--- a/scripts/sstate-diff-machines.sh
+++ b/scripts/sstate-diff-machines.sh
@@ -1,5 +1,7 @@
1#!/bin/bash 1#!/bin/bash
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Used to compare sstate checksums between MACHINES. 7# Used to compare sstate checksums between MACHINES.
@@ -127,6 +129,8 @@ for M in ${machines}; do
127 fi 129 fi
128done 130done
129 131
132COMPARE_TASKS="do_configure.sigdata do_populate_sysroot.sigdata do_package_write_ipk.sigdata do_package_write_rpm.sigdata do_package_write_deb.sigdata do_package_write_tar.sigdata"
133
130function compareSignatures() { 134function compareSignatures() {
131 MACHINE1=$1 135 MACHINE1=$1
132 MACHINE2=$2 136 MACHINE2=$2
@@ -134,7 +138,7 @@ function compareSignatures() {
134 PRE_PATTERN="" 138 PRE_PATTERN=""
135 [ -n "${PATTERN}" ] || PRE_PATTERN="-v" 139 [ -n "${PATTERN}" ] || PRE_PATTERN="-v"
136 [ -n "${PATTERN}" ] || PATTERN="MACHINE" 140 [ -n "${PATTERN}" ] || PATTERN="MACHINE"
137 for TASK in do_configure.sigdata do_populate_sysroot.sigdata do_package_write_ipk.sigdata; do 141 for TASK in $COMPARE_TASKS; do
138 printf "\n\n === Comparing signatures for task ${TASK} between ${MACHINE1} and ${MACHINE2} ===\n" | tee -a ${OUTPUT}/signatures.${MACHINE2}.${TASK}.log 142 printf "\n\n === Comparing signatures for task ${TASK} between ${MACHINE1} and ${MACHINE2} ===\n" | tee -a ${OUTPUT}/signatures.${MACHINE2}.${TASK}.log
139 diff ${OUTPUT}/${MACHINE1}/list.M ${OUTPUT}/${MACHINE2}/list.M | grep ${PRE_PATTERN} "${PATTERN}" | grep ${TASK} > ${OUTPUT}/signatures.${MACHINE2}.${TASK} 143 diff ${OUTPUT}/${MACHINE1}/list.M ${OUTPUT}/${MACHINE2}/list.M | grep ${PRE_PATTERN} "${PATTERN}" | grep ${TASK} > ${OUTPUT}/signatures.${MACHINE2}.${TASK}
140 for i in `cat ${OUTPUT}/signatures.${MACHINE2}.${TASK} | sed 's#[^/]*/\([^/]*\)/.*#\1#g' | sort -u | xargs`; do 144 for i in `cat ${OUTPUT}/signatures.${MACHINE2}.${TASK} | sed 's#[^/]*/\([^/]*\)/.*#\1#g' | sort -u | xargs`; do
diff --git a/scripts/sstate-sysroot-cruft.sh b/scripts/sstate-sysroot-cruft.sh
index fbf1ca3c43..b2002badfb 100755
--- a/scripts/sstate-sysroot-cruft.sh
+++ b/scripts/sstate-sysroot-cruft.sh
@@ -1,5 +1,7 @@
1#!/bin/sh 1#!/bin/sh
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5# Used to find files installed in sysroot which are not tracked by sstate manifest 7# Used to find files installed in sysroot which are not tracked by sstate manifest
@@ -145,18 +147,6 @@ WHITELIST="${WHITELIST} \
145 .*/var/cache/fontconfig/ \ 147 .*/var/cache/fontconfig/ \
146" 148"
147 149
148# created by oe.utils.write_ld_so_conf which is used from few bbclasses and recipes:
149# meta/classes/image-prelink.bbclass: oe.utils.write_ld_so_conf(d)
150# meta/classes/insane.bbclass: oe.utils.write_ld_so_conf(d)
151# meta/classes/insane.bbclass: oe.utils.write_ld_so_conf(d)
152# meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb: oe.utils.write_ld_so_conf(d)
153# meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb: oe.utils.write_ld_so_conf(d)
154# introduced in oe-core commit 7fd1d7e639c2ed7e0699937a5cb245c187b7c811
155# and more visible since added to gobject-introspection in 10e0c1a3a452baa05d160a92a54b2e33cf0fd061
156WHITELIST="${WHITELIST} \
157 [^/]*/etc/ld.so.conf \
158"
159
160SYSROOTS="`readlink -f ${tmpdir}`/sysroots/" 150SYSROOTS="`readlink -f ${tmpdir}`/sysroots/"
161 151
162mkdir ${OUTPUT} 152mkdir ${OUTPUT}
diff --git a/scripts/sysroot-relativelinks.py b/scripts/sysroot-relativelinks.py
index 56e36f3ad5..ccb3c867f0 100755
--- a/scripts/sysroot-relativelinks.py
+++ b/scripts/sysroot-relativelinks.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/task-time b/scripts/task-time
index bcd1e25817..8f71b29b77 100755
--- a/scripts/task-time
+++ b/scripts/task-time
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
diff --git a/scripts/verify-bashisms b/scripts/verify-bashisms
index fb0cc719ea..fc3677c6ed 100755
--- a/scripts/verify-bashisms
+++ b/scripts/verify-bashisms
@@ -1,11 +1,13 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
4# 6#
5 7
6import sys, os, subprocess, re, shutil 8import sys, os, subprocess, re, shutil
7 9
8whitelist = ( 10allowed = (
9 # type is supported by dash 11 # type is supported by dash
10 'if type systemctl >/dev/null 2>/dev/null; then', 12 'if type systemctl >/dev/null 2>/dev/null; then',
11 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then', 13 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then',
@@ -19,8 +21,8 @@ whitelist = (
19 '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE' 21 '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE'
20 ) 22 )
21 23
22def is_whitelisted(s): 24def is_allowed(s):
23 for w in whitelist: 25 for w in allowed:
24 if w in s: 26 if w in s:
25 return True 27 return True
26 return False 28 return False
@@ -49,7 +51,7 @@ def process(filename, function, lineno, script):
49 output = e.output.replace(fn.name, function) 51 output = e.output.replace(fn.name, function)
50 if not output or not output.startswith('possible bashism'): 52 if not output or not output.startswith('possible bashism'):
51 # Probably starts with or contains only warnings. Dump verbatim 53 # Probably starts with or contains only warnings. Dump verbatim
52 # with one space indention. Can't do the splitting and whitelist 54 # with one space indention. Can't do the splitting and allowed
53 # checking below. 55 # checking below.
54 return '\n'.join([filename, 56 return '\n'.join([filename,
55 ' Unexpected output from checkbashisms.pl'] + 57 ' Unexpected output from checkbashisms.pl'] +
@@ -65,7 +67,7 @@ def process(filename, function, lineno, script):
65 # ... 67 # ...
66 # ... 68 # ...
67 result = [] 69 result = []
68 # Check the results against the whitelist 70 # Check the results against the allowed list
69 for message, source in zip(output[0::2], output[1::2]): 71 for message, source in zip(output[0::2], output[1::2]):
70 if not is_whitelisted(source): 72 if not is_allowed(source):
71 if lineno is not None: 73 if lineno is not None:
@@ -100,7 +102,7 @@ if __name__=='__main__':
100 args = parser.parse_args() 102 args = parser.parse_args()
101 103
102 if shutil.which("checkbashisms.pl") is None: 104 if shutil.which("checkbashisms.pl") is None:
103 print("Cannot find checkbashisms.pl on $PATH, get it from https://anonscm.debian.org/cgit/collab-maint/devscripts.git/plain/scripts/checkbashisms.pl") 105 print("Cannot find checkbashisms.pl on $PATH, get it from https://salsa.debian.org/debian/devscripts/raw/master/scripts/checkbashisms.pl")
104 sys.exit(1) 106 sys.exit(1)
105 107
106 # The order of defining the worker function, 108 # The order of defining the worker function,
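The check itself is a plain substring match against the allowed tuple; a quick sketch, assuming the definitions above (the second line is a genuine bashism, not in the list):

    assert is_allowed('if type systemctl >/dev/null 2>/dev/null; then echo ok; fi')
    assert not is_allowed('declare -A map')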
diff --git a/scripts/wic b/scripts/wic
index a741aed364..06e0b48db0 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -22,9 +22,9 @@ import sys
22import argparse 22import argparse
23import logging 23import logging
24import subprocess 24import subprocess
25import shutil
25 26
26from collections import namedtuple 27from collections import namedtuple
27from distutils import spawn
28 28
29# External modules 29# External modules
30scripts_path = os.path.dirname(os.path.realpath(__file__)) 30scripts_path = os.path.dirname(os.path.realpath(__file__))
@@ -47,7 +47,7 @@ if os.environ.get('SDKTARGETSYSROOT'):
47 break 47 break
48 sdkroot = os.path.dirname(sdkroot) 48 sdkroot = os.path.dirname(sdkroot)
49 49
50bitbake_exe = spawn.find_executable('bitbake') 50bitbake_exe = shutil.which('bitbake')
51if bitbake_exe: 51if bitbake_exe:
52 bitbake_path = scriptpath.add_bitbake_lib_path() 52 bitbake_path = scriptpath.add_bitbake_lib_path()
53 import bb 53 import bb
@@ -159,6 +159,9 @@ def wic_create_subcommand(options, usage_str):
159 "(Use -e/--image-name to specify it)") 159 "(Use -e/--image-name to specify it)")
160 native_sysroot = options.native_sysroot 160 native_sysroot = options.native_sysroot
161 161
162 if options.kernel_dir:
163 kernel_dir = options.kernel_dir
164
162 if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)): 165 if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)):
163 logger.info("Building wic-tools...\n") 166 logger.info("Building wic-tools...\n")
164 subprocess.check_call(["bitbake", "wic-tools"]) 167 subprocess.check_call(["bitbake", "wic-tools"])
@@ -206,7 +209,7 @@ def wic_create_subcommand(options, usage_str):
206 logger.info(" (Please check that the build artifacts for the machine") 209 logger.info(" (Please check that the build artifacts for the machine")
207 logger.info(" selected in local.conf actually exist and that they") 210 logger.info(" selected in local.conf actually exist and that they")
208 logger.info(" are the correct artifacts for the image (.wks file)).\n") 211 logger.info(" are the correct artifacts for the image (.wks file)).\n")
209 raise WicError("The artifact that couldn't be found was %s:\n %s", not_found, not_found_dir) 212 raise WicError("The artifact that couldn't be found was %s:\n %s" % (not_found, not_found_dir))
210 213
211 krootfs_dir = options.rootfs_dir 214 krootfs_dir = options.rootfs_dir
212 if krootfs_dir is None: 215 if krootfs_dir is None:
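The WicError change above matters because Exception, unlike the logging calls, does not apply printf-style arguments; a minimal illustration with a stand-in class:

    class WicError(Exception):
        pass

    # old style: the arguments are stored as a tuple, never interpolated
    assert str(WicError("artifact %s missing", "kernel")) != "artifact kernel missing"
    # new style: interpolate before raising
    assert str(WicError("artifact %s missing" % "kernel")) == "artifact kernel missing"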
@@ -346,6 +349,8 @@ def wic_init_parser_create(subparser):
346 default=False, help="output debug information") 349 default=False, help="output debug information")
347 subparser.add_argument("-i", "--imager", dest="imager", 350 subparser.add_argument("-i", "--imager", dest="imager",
348 default="direct", help="the wic imager plugin") 351 default="direct", help="the wic imager plugin")
352 subparser.add_argument("--extra-space", type=int, dest="extra_space",
353 default=0, help="additional free disk space to add to the image")
349 return 354 return
350 355
351 356
diff --git a/scripts/yocto-check-layer b/scripts/yocto-check-layer
index b7c83c8b54..67cc71950f 100755
--- a/scripts/yocto-check-layer
+++ b/scripts/yocto-check-layer
@@ -24,7 +24,7 @@ import scriptpath
24scriptpath.add_oe_lib_path() 24scriptpath.add_oe_lib_path()
25scriptpath.add_bitbake_lib_path() 25scriptpath.add_bitbake_lib_path()
26 26
27from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_signatures, check_bblayers 27from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_layer_dependencies, get_signatures, check_bblayers, sanity_check_layers
28from oeqa.utils.commands import get_bb_vars 28from oeqa.utils.commands import get_bb_vars
29 29
30PROGNAME = 'yocto-check-layer' 30PROGNAME = 'yocto-check-layer'
@@ -41,6 +41,12 @@ def test_layer(td, layer, test_software_layer_signatures):
41 tc.loadTests(CASES_PATHS) 41 tc.loadTests(CASES_PATHS)
42 return tc.runTests() 42 return tc.runTests()
43 43
44def dump_layer_debug(layer):
45 logger.debug("Found layer %s (%s)" % (layer["name"], layer["path"]))
46 collections = layer.get("collections", {})
47 if collections:
48 logger.debug("%s collections: %s" % (layer["name"], ", ".join(collections)))
49
44def main(): 50def main():
45 parser = argparse.ArgumentParser( 51 parser = argparse.ArgumentParser(
46 description="Yocto Project layer checking tool", 52 description="Yocto Project layer checking tool",
@@ -51,6 +57,8 @@ def main():
51 help='File to output log (optional)', action='store') 57 help='File to output log (optional)', action='store')
52 parser.add_argument('--dependency', nargs="+", 58 parser.add_argument('--dependency', nargs="+",
53 help='Layers to process for dependencies', action='store') 59 help='Layers to process for dependencies', action='store')
60 parser.add_argument('--no-auto-dependency', help='Disable automatic testing of dependencies',
61 action='store_true')
54 parser.add_argument('--machines', nargs="+", 62 parser.add_argument('--machines', nargs="+",
55 help='List of MACHINEs to be used during testing', action='store') 63 help='List of MACHINEs to be used during testing', action='store')
56 parser.add_argument('--additional-layers', nargs="+", 64 parser.add_argument('--additional-layers', nargs="+",
@@ -104,6 +112,17 @@ def main():
104 else: 112 else:
105 dep_layers = layers 113 dep_layers = layers
106 114
115 logger.debug("Found additional layers:")
116 for l in additional_layers:
117 dump_layer_debug(l)
118 logger.debug("Found dependency layers:")
119 for l in dep_layers:
120 dump_layer_debug(l)
121
122 if not sanity_check_layers(additional_layers + dep_layers, logger):
123 logger.error("Failed layer validation")
124 return 1
125
107 logger.info("Detected layers:") 126 logger.info("Detected layers:")
108 for layer in layers: 127 for layer in layers:
109 if layer['type'] == LayerType.ERROR_BSP_DISTRO: 128 if layer['type'] == LayerType.ERROR_BSP_DISTRO:
@@ -112,7 +131,7 @@ def main():
112 % layer['name']) 131 % layer['name'])
113 layers.remove(layer) 132 layers.remove(layer)
114 elif layer['type'] == LayerType.ERROR_NO_LAYER_CONF: 133 elif layer['type'] == LayerType.ERROR_NO_LAYER_CONF:
115 logger.error("%s: Don't have conf/layer.conf file."\ 134 logger.info("%s: Doesn't have conf/layer.conf file, so ignoring"\
116 % layer['name']) 135 % layer['name'])
117 layers.remove(layer) 136 layers.remove(layer)
118 else: 137 else:
@@ -121,6 +140,21 @@ def main():
121 if not layers: 140 if not layers:
122 return 1 141 return 1
123 142
143 # Find all dependencies, and get them checked too
144 if not args.no_auto_dependency:
145 depends = []
146 for layer in layers:
147 layer_depends = get_layer_dependencies(layer, dep_layers, logger)
148 if layer_depends:
149 for d in layer_depends:
150 if d not in depends:
151 depends.append(d)
152
153 for d in depends:
154 if d not in layers:
155 logger.info("Adding %s to the list of layers to test, as a dependency", d['name'])
156 layers.append(d)
157
124 shutil.copyfile(bblayersconf, bblayersconf + '.backup') 158 shutil.copyfile(bblayersconf, bblayersconf + '.backup')
125 def cleanup_bblayers(signum, frame): 159 def cleanup_bblayers(signum, frame):
126 shutil.copyfile(bblayersconf + '.backup', bblayersconf) 160 shutil.copyfile(bblayersconf + '.backup', bblayersconf)
@@ -134,11 +168,13 @@ def main():
134 168
135 layers_tested = 0 169 layers_tested = 0
136 for layer in layers: 170 for layer in layers:
137 if layer['type'] == LayerType.ERROR_NO_LAYER_CONF or \ 171 if layer['type'] in (LayerType.ERROR_NO_LAYER_CONF, LayerType.ERROR_BSP_DISTRO):
138 layer['type'] == LayerType.ERROR_BSP_DISTRO:
139 continue 172 continue
140 173
141 if check_bblayers(bblayersconf, layer['path'], logger): 174 # Reset to a clean backup copy for each run
175 shutil.copyfile(bblayersconf + '.backup', bblayersconf)
176
177 if layer['type'] not in (LayerType.CORE, ) and check_bblayers(bblayersconf, layer['path'], logger):
142 logger.info("%s already in %s. To capture initial signatures, layer under test should not be present " 178 logger.info("%s already in %s. To capture initial signatures, layer under test should not be present "
143 "in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name'])) 179 "in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name']))
144 results[layer['name']] = None 180 results[layer['name']] = None
@@ -149,17 +185,13 @@ def main():
149 logger.info("Setting up for %s(%s), %s" % (layer['name'], layer['type'], 185 logger.info("Setting up for %s(%s), %s" % (layer['name'], layer['type'],
150 layer['path'])) 186 layer['path']))
151 187
152 shutil.copyfile(bblayersconf + '.backup', bblayersconf)
153
154 missing_dependencies = not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) 188 missing_dependencies = not add_layer_dependencies(bblayersconf, layer, dep_layers, logger)
155 if not missing_dependencies: 189 if not missing_dependencies:
156 for additional_layer in additional_layers: 190 for additional_layer in additional_layers:
157 if not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger): 191 if not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger):
158 missing_dependencies = True 192 missing_dependencies = True
159 break 193 break
160 if not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) or \ 194 if missing_dependencies:
161 any(map(lambda additional_layer: not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger),
162 additional_layers)):
163 logger.info('Skipping %s due to missing dependencies.' % layer['name']) 195 logger.info('Skipping %s due to missing dependencies.' % layer['name'])
164 results[layer['name']] = None 196 results[layer['name']] = None
165 results_status[layer['name']] = 'SKIPPED (Missing dependencies)' 197 results_status[layer['name']] = 'SKIPPED (Missing dependencies)'
diff --git a/scripts/yocto_testresults_query.py b/scripts/yocto_testresults_query.py
new file mode 100755
index 0000000000..521ead8473
--- /dev/null
+++ b/scripts/yocto_testresults_query.py
@@ -0,0 +1,131 @@
1#!/usr/bin/env python3
2
3# Yocto Project test results management tool
4# This script is a thin layer over resulttool to manage test results and regression reports.
5# Its main feature is to translate tags or branch names to SHA-1 revisions, and then to run resulttool
6# with those computed revisions
7#
8# Copyright (C) 2023 OpenEmbedded Contributors
9#
10# SPDX-License-Identifier: MIT
11#
12
13import sys
14import os
15import argparse
16import subprocess
17import tempfile
18import lib.scriptutils as scriptutils
19
20script_path = os.path.dirname(os.path.realpath(__file__))
21poky_path = os.path.abspath(os.path.join(script_path, ".."))
22resulttool = os.path.abspath(os.path.join(script_path, "resulttool"))
23logger = scriptutils.logger_create(sys.argv[0])
24testresults_default_url="git://git.yoctoproject.org/yocto-testresults"
25
26def create_workdir():
27 workdir = tempfile.mkdtemp(prefix='yocto-testresults-query.')
28 logger.info(f"Shallow-cloning testresults in {workdir}")
29 subprocess.check_call(["git", "clone", testresults_default_url, workdir, "--depth", "1"])
30 return workdir
31
32def get_sha1(pokydir, revision):
33 try:
34 rev = subprocess.check_output(["git", "rev-list", "-n", "1", revision], cwd=pokydir).decode('utf-8').strip()
35 logger.info(f"SHA-1 revision for {revision} in {pokydir} is {rev}")
36 return rev
37 except subprocess.CalledProcessError:
38 logger.error(f"Can not find SHA-1 for {revision} in {pokydir}")
39 return None
40
41def get_branch(tag):
42 # The tags in test results repository, as returned by git rev-list, have the following form:
43 # refs/tags/<branch>/<count>-g<sha1>/<num>
44 return '/'.join(tag.split("/")[2:-2])
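For example (tag invented), branch names may themselves contain slashes, which is why the join over the middle fields is needed:

    assert get_branch("refs/tags/abelloni/master-next/7-gdeadbeef/2") == "abelloni/master-next"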
45
46def fetch_testresults(workdir, sha1):
47 logger.info(f"Fetching test results for {sha1} in {workdir}")
48 rawtags = subprocess.check_output(["git", "ls-remote", "--refs", "--tags", "origin", f"*{sha1}*"], cwd=workdir).decode('utf-8').strip()
49 if not rawtags:
50 raise Exception(f"No reference found for commit {sha1} in {workdir}")
51 branch = ""
52 for rev in [rawtag.split()[1] for rawtag in rawtags.splitlines()]:
53 if not branch:
54 branch = get_branch(rev)
55 logger.info(f"Fetching matching revision: {rev}")
56 subprocess.check_call(["git", "fetch", "--depth", "1", "origin", f"{rev}:{rev}"], cwd=workdir)
57 return branch
58
59def compute_regression_report(workdir, basebranch, baserevision, targetbranch, targetrevision, args):
60 logger.info(f"Running resulttool regression between SHA1 {baserevision} and {targetrevision}")
61 command = [resulttool, "regression-git", "--branch", basebranch, "--commit", baserevision, "--branch2", targetbranch, "--commit2", targetrevision, workdir]
62 if args.limit:
63 command.extend(["-l", args.limit])
64 report = subprocess.check_output(command).decode("utf-8")
65 return report
66
67def print_report_with_header(report, baseversion, baserevision, targetversion, targetrevision):
68 print("========================== Regression report ==============================")
69 print(f'{"=> Target:": <16}{targetversion: <16}({targetrevision})')
70 print(f'{"=> Base:": <16}{baseversion: <16}({baserevision})')
71 print("===========================================================================\n")
72 print(report, end='')
73
74def regression(args):
75 logger.info(f"Compute regression report between {args.base} and {args.target}")
76 if args.testresultsdir:
77 workdir = args.testresultsdir
78 else:
79 workdir = create_workdir()
80
81 try:
82 baserevision = get_sha1(poky_path, args.base)
83 targetrevision = get_sha1(poky_path, args.target)
84 if not baserevision or not targetrevision:
85 logger.error("One or more revision(s) missing. You might be targeting nonexistent tags/branches, or be in the wrong repository (you must use Poky and not oe-core)")
86 if not args.testresultsdir:
87 subprocess.check_call(["rm", "-rf", workdir])
88 sys.exit(1)
89 basebranch = fetch_testresults(workdir, baserevision)
90 targetbranch = fetch_testresults(workdir, targetrevision)
91 report = compute_regression_report(workdir, basebranch, baserevision, targetbranch, targetrevision, args)
92 print_report_with_header(report, args.base, baserevision, args.target, targetrevision)
93 finally:
94 if not args.testresultsdir:
95 subprocess.check_call(["rm", "-rf", workdir])
96
97def main():
98 parser = argparse.ArgumentParser(description="Yocto Project test results helper")
99 subparsers = parser.add_subparsers(
100 help="Supported commands for test results helper",
101 required=True)
102 parser_regression_report = subparsers.add_parser(
103 "regression-report",
104 help="Generate regression report between two fixed revisions. Revisions can be branch name or tag")
105 parser_regression_report.add_argument(
106 'base',
107 help="Revision or tag against which to compare results (i.e: the older)")
108 parser_regression_report.add_argument(
109 'target',
110 help="Revision or tag to compare against the base (i.e: the newer)")
111 parser_regression_report.add_argument(
112 '-t',
113 '--testresultsdir',
114 help=f"An existing test results directory. {sys.argv[0]} will automatically clone it and use default branch if not provided")
115 parser_regression_report.add_argument(
116 '-l',
117 '--limit',
118 help=f"Maximum number of changes to display per test. Can be set to 0 to print all changes")
119 parser_regression_report.set_defaults(func=regression)
120
121 args = parser.parse_args()
122 args.func(args)
123
124if __name__ == '__main__':
125 try:
126 ret = main()
127 except Exception:
128 ret = 1
129 import traceback
130 traceback.print_exc()
131 sys.exit(ret)
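A typical invocation, using invented tag names, compares two Poky releases and prints the regression report, optionally capping the per-test output:

    ./scripts/yocto_testresults_query.py regression-report yocto-4.3 yocto-4.3.1 -l 25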