Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/autobuilder-worker-prereq-tests | 21
-rwxr-xr-x  scripts/buildhistory-diff | 5
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh | 15
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py | 145
-rwxr-xr-x  scripts/contrib/convert-variable-renames.py | 116
-rwxr-xr-x  scripts/contrib/documentation-audit.sh | 4
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py | 167
-rwxr-xr-x  scripts/contrib/patchreview.py | 4
-rwxr-xr-x  scripts/contrib/test_build_time.sh | 2
-rwxr-xr-x  scripts/crosstap | 2
-rwxr-xr-x  scripts/git | 26
-rwxr-xr-x  scripts/install-buildtools | 6
-rw-r--r--  scripts/lib/checklayer/__init__.py | 28
-rw-r--r--  scripts/lib/checklayer/cases/bsp.py | 2
-rw-r--r--  scripts/lib/checklayer/cases/common.py | 28
-rw-r--r--  scripts/lib/devtool/deploy.py | 2
-rw-r--r--  scripts/lib/devtool/sdk.py | 2
-rw-r--r--  scripts/lib/devtool/upgrade.py | 15
-rw-r--r--  scripts/lib/recipetool/create.py | 76
-rw-r--r--  scripts/lib/recipetool/create_buildsys.py | 3
-rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py | 48
-rw-r--r--  scripts/lib/recipetool/licenses.csv | 70
-rw-r--r--  scripts/lib/scriptutils.py | 8
-rw-r--r--  scripts/lib/wic/help.py | 6
-rw-r--r--  scripts/lib/wic/ksparser.py | 1
-rw-r--r--  scripts/lib/wic/misc.py | 1
-rw-r--r--  scripts/lib/wic/partition.py | 3
-rw-r--r--  scripts/lib/wic/pluginbase.py | 8
-rw-r--r--  scripts/lib/wic/plugins/imager/direct.py | 5
-rw-r--r--  scripts/lib/wic/plugins/source/rawcopy.py | 29
-rw-r--r--  scripts/lib/wic/plugins/source/rootfs.py | 5
-rwxr-xr-x  scripts/oe-buildenv-internal | 6
-rwxr-xr-x  scripts/oe-check-sstate | 2
-rwxr-xr-x  scripts/oe-pkgdata-util | 2
-rwxr-xr-x  scripts/oe-setup-builddir | 16
-rw-r--r--  scripts/postinst-intercepts/update_udev_hwdb | 5
-rwxr-xr-x  scripts/relocate_sdk.py | 25
-rwxr-xr-x  scripts/runqemu | 68
-rwxr-xr-x  scripts/runqemu-addptable2image | 2
-rwxr-xr-x  scripts/runqemu-ifdown | 1
-rwxr-xr-x  scripts/sstate-sysroot-cruft.sh | 12
-rwxr-xr-x  scripts/verify-bashisms | 10
-rwxr-xr-x  scripts/wic | 3
-rwxr-xr-x  scripts/yocto-check-layer | 19
44 files changed, 673 insertions, 351 deletions
diff --git a/scripts/autobuilder-worker-prereq-tests b/scripts/autobuilder-worker-prereq-tests
index 82e9a77..572227d 100755
--- a/scripts/autobuilder-worker-prereq-tests
+++ b/scripts/autobuilder-worker-prereq-tests
@@ -51,16 +51,31 @@ if (( $WATCHES < 65000 )); then
51 echo 'Need to increase watches (echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf' 51 echo 'Need to increase watches (echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf'
52 exit 1 52 exit 1
53fi 53fi
54OPEN_FILES=$(ulimit -n)
55if (( $OPEN_FILES < 65535 )); then
56 echo 'Increase maximum open files in /etc/security/limits.conf'
57 echo '* soft nofile 131072'
58 echo '* hard nofile 131072'
59 exit 1
60fi
61MAX_PROCESSES=$(ulimit -u)
62if (( $MAX_PROCESSES < 514542 )); then
63 echo 'Increase maximum user processes in /etc/security/limits.conf'
64 echo '* hard nproc 515294'
65 echo '* soft nproc 514543'
66 exit 1
67fi
68
54mkdir -p tmp/deploy/images/qemux86-64 69mkdir -p tmp/deploy/images/qemux86-64
55pushd tmp/deploy/images/qemux86-64 70pushd tmp/deploy/images/qemux86-64
56if [ ! -e core-image-minimal-qemux86-64.ext4 ]; then 71if [ ! -e core-image-minimal-qemux86-64.ext4 ]; then
57 wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4 72 wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4
58fi 73fi
59if [ ! -e core-image-minimal-qemux86-64.qemuboot.conf ]; then 74if [ ! -e core-image-minimal-qemux86-64.qemuboot.conf ]; then
60 wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf 75 wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf
61fi 76fi
62if [ ! -e bzImage-qemux86-64.bin ]; then 77if [ ! -e bzImage-qemux86-64.bin ]; then
63 wget http://downloads.yoctoproject.org/releases/yocto/yocto-2.5.1/machines/qemu/qemux86-64/bzImage-qemux86-64.bin 78 wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/bzImage-qemux86-64.bin
64fi 79fi
65popd 80popd
66bitbake qemu-helper-native 81bitbake qemu-helper-native
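
The new checks above read the soft limits via ulimit -n and ulimit -u; purely as an illustration, the same values can be inspected from Python on Linux with the standard resource module (thresholds copied from the hunk, not part of the patch):

import resource

# soft/hard limit for open file descriptors (ulimit -n)
soft_nofile, hard_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)
if soft_nofile < 65535:
    print("Increase maximum open files in /etc/security/limits.conf")

# soft/hard limit for user processes (ulimit -u)
soft_nproc, hard_nproc = resource.getrlimit(resource.RLIMIT_NPROC)
if soft_nproc < 514542:
    print("Increase maximum user processes in /etc/security/limits.conf")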
diff --git a/scripts/buildhistory-diff b/scripts/buildhistory-diff
index 3bd40a2..a6e785a 100755
--- a/scripts/buildhistory-diff
+++ b/scripts/buildhistory-diff
@@ -11,7 +11,6 @@
11import sys 11import sys
12import os 12import os
13import argparse 13import argparse
14from distutils.version import LooseVersion
15 14
16# Ensure PythonGit is installed (buildhistory_analysis needs it) 15# Ensure PythonGit is installed (buildhistory_analysis needs it)
17try: 16try:
@@ -73,10 +72,6 @@ def main():
73 parser = get_args_parser() 72 parser = get_args_parser()
74 args = parser.parse_args() 73 args = parser.parse_args()
75 74
76 if LooseVersion(git.__version__) < '0.3.1':
77 sys.stderr.write("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script\n")
78 sys.exit(1)
79
80 if len(args.revisions) > 2: 75 if len(args.revisions) > 2:
81 sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:])) 76 sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:]))
82 parser.print_help() 77 parser.print_help()
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
index fa71d4a..0a85e6e 100755
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then
87 exit 1 87 exit 1
88fi 88fi
89 89
90if [ -n "$email_to" ]; then
91 if ! [ -x "$(command -v phantomjs)" ]; then
92 echo "ERROR: Sending email needs phantomjs."
93 exit 1
94 fi
95 if ! [ -x "$(command -v optipng)" ]; then
96 echo "ERROR: Sending email needs optipng."
97 exit 1
98 fi
99fi
100
101# Open a file descriptor for flock and acquire lock 90# Open a file descriptor for flock and acquire lock
102LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" 91LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
103if ! exec 3> "$LOCK_FILE"; then 92if ! exec 3> "$LOCK_FILE"; then
104 echo "ERROR: Unable to open lock file" 93 echo "ERROR: Unable to open loemack file"
105 exit 1 94 exit 1
106fi 95fi
107if ! flock -n 3; then 96if ! flock -n 3; then
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then
226 if [ -n "$email_to" ]; then 215 if [ -n "$email_to" ]; then
227 echo "Emailing test report" 216 echo "Emailing test report"
228 os_name=`get_os_release_var PRETTY_NAME` 217 os_name=`get_os_release_var PRETTY_NAME`
229 "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" 218 "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
230 fi 219 fi
231 220
232 # Upload report files, unless we're on detached head 221 # Upload report files, unless we're on detached head
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
new file mode 100755
index 0000000..4e194de
--- /dev/null
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -0,0 +1,145 @@
1#!/usr/bin/env python3
2#
3# Conversion script to change LICENSE entries to SPDX identifiers
4#
5# Copyright (C) 2021-2022 Richard Purdie
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re
11import os
12import sys
13import tempfile
14import shutil
15import mimetypes
16
17if len(sys.argv) < 2:
18 print("Please specify a directory to run the conversion script against.")
19 sys.exit(1)
20
21license_map = {
22"AGPL-3" : "AGPL-3.0-only",
23"AGPL-3+" : "AGPL-3.0-or-later",
24"AGPLv3" : "AGPL-3.0-only",
25"AGPLv3+" : "AGPL-3.0-or-later",
26"AGPLv3.0" : "AGPL-3.0-only",
27"AGPLv3.0+" : "AGPL-3.0-or-later",
28"AGPL-3.0" : "AGPL-3.0-only",
29"AGPL-3.0+" : "AGPL-3.0-or-later",
30"BSD-0-Clause" : "0BSD",
31"GPL-1" : "GPL-1.0-only",
32"GPL-1+" : "GPL-1.0-or-later",
33"GPLv1" : "GPL-1.0-only",
34"GPLv1+" : "GPL-1.0-or-later",
35"GPLv1.0" : "GPL-1.0-only",
36"GPLv1.0+" : "GPL-1.0-or-later",
37"GPL-1.0" : "GPL-1.0-only",
38"GPL-1.0+" : "GPL-1.0-or-later",
39"GPL-2" : "GPL-2.0-only",
40"GPL-2+" : "GPL-2.0-or-later",
41"GPLv2" : "GPL-2.0-only",
42"GPLv2+" : "GPL-2.0-or-later",
43"GPLv2.0" : "GPL-2.0-only",
44"GPLv2.0+" : "GPL-2.0-or-later",
45"GPL-2.0" : "GPL-2.0-only",
46"GPL-2.0+" : "GPL-2.0-or-later",
47"GPL-3" : "GPL-3.0-only",
48"GPL-3+" : "GPL-3.0-or-later",
49"GPLv3" : "GPL-3.0-only",
50"GPLv3+" : "GPL-3.0-or-later",
51"GPLv3.0" : "GPL-3.0-only",
52"GPLv3.0+" : "GPL-3.0-or-later",
53"GPL-3.0" : "GPL-3.0-only",
54"GPL-3.0+" : "GPL-3.0-or-later",
55"LGPLv2" : "LGPL-2.0-only",
56"LGPLv2+" : "LGPL-2.0-or-later",
57"LGPLv2.0" : "LGPL-2.0-only",
58"LGPLv2.0+" : "LGPL-2.0-or-later",
59"LGPL-2.0" : "LGPL-2.0-only",
60"LGPL-2.0+" : "LGPL-2.0-or-later",
61"LGPL2.1" : "LGPL-2.1-only",
62"LGPL2.1+" : "LGPL-2.1-or-later",
63"LGPLv2.1" : "LGPL-2.1-only",
64"LGPLv2.1+" : "LGPL-2.1-or-later",
65"LGPL-2.1" : "LGPL-2.1-only",
66"LGPL-2.1+" : "LGPL-2.1-or-later",
67"LGPLv3" : "LGPL-3.0-only",
68"LGPLv3+" : "LGPL-3.0-or-later",
69"LGPL-3.0" : "LGPL-3.0-only",
70"LGPL-3.0+" : "LGPL-3.0-or-later",
71"MPL-1" : "MPL-1.0",
72"MPLv1" : "MPL-1.0",
73"MPLv1.1" : "MPL-1.1",
74"MPLv2" : "MPL-2.0",
75"MIT-X" : "MIT",
76"MIT-style" : "MIT",
77"openssl" : "OpenSSL",
78"PSF" : "PSF-2.0",
79"PSFv2" : "PSF-2.0",
80"Python-2" : "Python-2.0",
81"Apachev2" : "Apache-2.0",
82"Apache-2" : "Apache-2.0",
83"Artisticv1" : "Artistic-1.0",
84"Artistic-1" : "Artistic-1.0",
85"AFL-2" : "AFL-2.0",
86"AFL-1" : "AFL-1.2",
87"AFLv2" : "AFL-2.0",
88"AFLv1" : "AFL-1.2",
89"CDDLv1" : "CDDL-1.0",
90"CDDL-1" : "CDDL-1.0",
91"EPLv1.0" : "EPL-1.0",
92"FreeType" : "FTL",
93"Nauman" : "Naumen",
94"tcl" : "TCL",
95"vim" : "Vim",
96"SGIv1" : "SGI-1",
97}
98
99def processfile(fn):
100 print("processing file '%s'" % fn)
101 try:
102 fh, abs_path = tempfile.mkstemp()
103 modified = False
104 with os.fdopen(fh, 'w') as new_file:
105 with open(fn, "r") as old_file:
106 for line in old_file:
107 if not line.startswith("LICENSE"):
108 new_file.write(line)
109 continue
110 orig = line
111 for license in sorted(license_map, key=len, reverse=True):
112 for ending in ['"', "'", " ", ")"]:
113 line = line.replace(license + ending, license_map[license] + ending)
114 if orig != line:
115 modified = True
116 new_file.write(line)
117 new_file.close()
118 if modified:
119 shutil.copymode(fn, abs_path)
120 os.remove(fn)
121 shutil.move(abs_path, fn)
122 except UnicodeDecodeError:
123 pass
124
125ourname = os.path.basename(sys.argv[0])
126ourversion = "0.01"
127
128if os.path.isfile(sys.argv[1]):
129 processfile(sys.argv[1])
130 sys.exit(0)
131
132for targetdir in sys.argv[1:]:
133 print("processing directory '%s'" % targetdir)
134 for root, dirs, files in os.walk(targetdir):
135 for name in files:
136 if name == ourname:
137 continue
138 fn = os.path.join(root, name)
139 if os.path.islink(fn):
140 continue
141 if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
142 continue
143 processfile(fn)
144
145print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
new file mode 100755
index 0000000..eded90c
--- /dev/null
+++ b/scripts/contrib/convert-variable-renames.py
@@ -0,0 +1,116 @@
1#!/usr/bin/env python3
2#
3# Conversion script to rename variables to versions with improved terminology.
4# Also highlights potentially problematic language and removed variables.
5#
6# Copyright (C) 2021 Richard Purdie
7# Copyright (C) 2022 Wind River Systems, Inc.
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import re
13import os
14import sys
15import tempfile
16import shutil
17import mimetypes
18
19if len(sys.argv) < 2:
20 print("Please specify a directory to run the conversion script against.")
21 sys.exit(1)
22
23renames = {
24"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
25"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
26"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
27"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
28"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
29"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
30"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
31"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
32"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
33"PNBLACKLIST" : "SKIP_RECIPE",
34"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
35"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
36"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
37"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
38"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
39"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
40"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
41"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
42"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
43"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
44"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
45"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
46}
47
48removed_list = [
49"BB_STAMP_WHITELIST",
50"BB_STAMP_POLICY",
51"INHERIT_BLACKLIST",
52"TUNEABI_WHITELIST",
53]
54
55context_check_list = [
56"blacklist",
57"whitelist",
58"abort",
59]
60
61def processfile(fn):
62
63 print("processing file '%s'" % fn)
64 try:
65 fh, abs_path = tempfile.mkstemp()
66 modified = False
67 with os.fdopen(fh, 'w') as new_file:
68 with open(fn, "r") as old_file:
69 lineno = 0
70 for line in old_file:
71 lineno += 1
72 if not line or "BB_RENAMED_VARIABLE" in line:
73 continue
74 # Do the renames
75 for old_name, new_name in renames.items():
76 if old_name in line:
77 line = line.replace(old_name, new_name)
78 modified = True
79 # Find removed names
80 for removed_name in removed_list:
81 if removed_name in line:
82 print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
83 for check_word in context_check_list:
84 if re.search(check_word, line, re.IGNORECASE):
85 print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
86 new_file.write(line)
87 new_file.close()
88 if modified:
89 print("*** Modified file '%s'" % (fn))
90 shutil.copymode(fn, abs_path)
91 os.remove(fn)
92 shutil.move(abs_path, fn)
93 except UnicodeDecodeError:
94 pass
95
96ourname = os.path.basename(sys.argv[0])
97ourversion = "0.1"
98
99if os.path.isfile(sys.argv[1]):
100 processfile(sys.argv[1])
101 sys.exit(0)
102
103for targetdir in sys.argv[1:]:
104 print("processing directory '%s'" % targetdir)
105 for root, dirs, files in os.walk(targetdir):
106 for name in files:
107 if name == ourname:
108 continue
109 fn = os.path.join(root, name)
110 if os.path.islink(fn):
111 continue
112 if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
113 continue
114 processfile(fn)
115
116print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index f436f9b..36f7f32 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -26,8 +26,8 @@ if [ -z "$BITBAKE" ]; then
26fi 26fi
27 27
28echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" 28echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
29echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " 29echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
30echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\"" 30echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
31 31
32for pkg in `bitbake -s | awk '{ print \$1 }'`; do 32for pkg in `bitbake -s | awk '{ print \$1 }'`; do
33 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || 33 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
index de3862c..7192113 100755
--- a/scripts/contrib/oe-build-perf-report-email.py
+++ b/scripts/contrib/oe-build-perf-report-email.py
@@ -19,8 +19,6 @@ import socket
19import subprocess 19import subprocess
20import sys 20import sys
21import tempfile 21import tempfile
22from email.mime.image import MIMEImage
23from email.mime.multipart import MIMEMultipart
24from email.mime.text import MIMEText 22from email.mime.text import MIMEText
25 23
26 24
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
29log = logging.getLogger('oe-build-perf-report') 27log = logging.getLogger('oe-build-perf-report')
30 28
31 29
32# Find js scaper script
33SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
34 'scrape-html-report.js')
35if not os.path.isfile(SCRAPE_JS):
36 log.error("Unableto find oe-build-perf-report-scrape.js")
37 sys.exit(1)
38
39
40class ReportError(Exception):
41 """Local errors"""
42 pass
43
44
45def check_utils():
46 """Check that all needed utils are installed in the system"""
47 missing = []
48 for cmd in ('phantomjs', 'optipng'):
49 if not shutil.which(cmd):
50 missing.append(cmd)
51 if missing:
52 log.error("The following tools are missing: %s", ' '.join(missing))
53 sys.exit(1)
54
55
56def parse_args(argv): 30def parse_args(argv):
57 """Parse command line arguments""" 31 """Parse command line arguments"""
58 description = """Email build perf test report""" 32 description = """Email build perf test report"""
@@ -77,137 +51,19 @@ def parse_args(argv):
77 "the email parts") 51 "the email parts")
78 parser.add_argument('--text', 52 parser.add_argument('--text',
79 help="Plain text message") 53 help="Plain text message")
80 parser.add_argument('--html',
81 help="HTML peport generated by oe-build-perf-report")
82 parser.add_argument('--phantomjs-args', action='append',
83 help="Extra command line arguments passed to PhantomJS")
84 54
85 args = parser.parse_args(argv) 55 args = parser.parse_args(argv)
86 56
87 if not args.html and not args.text: 57 if not args.text:
88 parser.error("Please specify --html and/or --text") 58 parser.error("Please specify --text")
89 59
90 return args 60 return args
91 61
92 62
93def decode_png(infile, outfile): 63def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
94 """Parse/decode/optimize png data from a html element"""
95 with open(infile) as f:
96 raw_data = f.read()
97
98 # Grab raw base64 data
99 b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1)
100 b64_data = re.sub('">.+$', '', b64_data, 1)
101
102 # Replace file with proper decoded png
103 with open(outfile, 'wb') as f:
104 f.write(base64.b64decode(b64_data))
105
106 subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
107
108
109def mangle_html_report(infile, outfile, pngs):
110 """Mangle html file into a email compatible format"""
111 paste = True
112 png_dir = os.path.dirname(outfile)
113 with open(infile) as f_in:
114 with open(outfile, 'w') as f_out:
115 for line in f_in.readlines():
116 stripped = line.strip()
117 # Strip out scripts
118 if stripped == '<!--START-OF-SCRIPTS-->':
119 paste = False
120 elif stripped == '<!--END-OF-SCRIPTS-->':
121 paste = True
122 elif paste:
123 if re.match('^.+href="data:image/png;base64', stripped):
124 # Strip out encoded pngs (as they're huge in size)
125 continue
126 elif 'www.gstatic.com' in stripped:
127 # HACK: drop references to external static pages
128 continue
129
130 # Replace charts with <img> elements
131 match = re.match('<div id="(?P<id>\w+)"', stripped)
132 if match and match.group('id') in pngs:
133 f_out.write('<img src="cid:{}"\n'.format(match.group('id')))
134 else:
135 f_out.write(line)
136
137
138def scrape_html_report(report, outdir, phantomjs_extra_args=None):
139 """Scrape html report into a format sendable by email"""
140 tmpdir = tempfile.mkdtemp(dir='.')
141 log.debug("Using tmpdir %s for phantomjs output", tmpdir)
142
143 if not os.path.isdir(outdir):
144 os.mkdir(outdir)
145 if os.path.splitext(report)[1] not in ('.html', '.htm'):
146 raise ReportError("Invalid file extension for report, needs to be "
147 "'.html' or '.htm'")
148
149 try:
150 log.info("Scraping HTML report with PhangomJS")
151 extra_args = phantomjs_extra_args if phantomjs_extra_args else []
152 subprocess.check_output(['phantomjs', '--debug=true'] + extra_args +
153 [SCRAPE_JS, report, tmpdir],
154 stderr=subprocess.STDOUT)
155
156 pngs = []
157 images = []
158 for fname in os.listdir(tmpdir):
159 base, ext = os.path.splitext(fname)
160 if ext == '.png':
161 log.debug("Decoding %s", fname)
162 decode_png(os.path.join(tmpdir, fname),
163 os.path.join(outdir, fname))
164 pngs.append(base)
165 images.append(fname)
166 elif ext in ('.html', '.htm'):
167 report_file = fname
168 else:
169 log.warning("Unknown file extension: '%s'", ext)
170 #shutil.move(os.path.join(tmpdir, fname), outdir)
171
172 log.debug("Mangling html report file %s", report_file)
173 mangle_html_report(os.path.join(tmpdir, report_file),
174 os.path.join(outdir, report_file), pngs)
175 return (os.path.join(outdir, report_file),
176 [os.path.join(outdir, i) for i in images])
177 finally:
178 shutil.rmtree(tmpdir)
179
180def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
181 blind_copy=[]):
182 """Send email"""
183 # Generate email message 64 # Generate email message
184 text_msg = html_msg = None 65 with open(text_fn) as f:
185 if text_fn: 66 msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
186 with open(text_fn) as f:
187 text_msg = MIMEText("Yocto build performance test report.\n" +
188 f.read(), 'plain')
189 if html_fn:
190 html_msg = msg = MIMEMultipart('related')
191 with open(html_fn) as f:
192 html_msg.attach(MIMEText(f.read(), 'html'))
193 for img_fn in image_fns:
194 # Expect that content id is same as the filename
195 cid = os.path.splitext(os.path.basename(img_fn))[0]
196 with open(img_fn, 'rb') as f:
197 image_msg = MIMEImage(f.read())
198 image_msg['Content-ID'] = '<{}>'.format(cid)
199 html_msg.attach(image_msg)
200
201 if text_msg and html_msg:
202 msg = MIMEMultipart('alternative')
203 msg.attach(text_msg)
204 msg.attach(html_msg)
205 elif text_msg:
206 msg = text_msg
207 elif html_msg:
208 msg = html_msg
209 else:
210 raise ReportError("Neither plain text nor html body specified")
211 67
212 pw_data = pwd.getpwuid(os.getuid()) 68 pw_data = pwd.getpwuid(os.getuid())
213 full_name = pw_data.pw_gecos.split(',')[0] 69 full_name = pw_data.pw_gecos.split(',')[0]
@@ -234,8 +90,6 @@ def main(argv=None):
234 if args.debug: 90 if args.debug:
235 log.setLevel(logging.DEBUG) 91 log.setLevel(logging.DEBUG)
236 92
237 check_utils()
238
239 if args.outdir: 93 if args.outdir:
240 outdir = args.outdir 94 outdir = args.outdir
241 if not os.path.exists(outdir): 95 if not os.path.exists(outdir):
@@ -245,25 +99,16 @@ def main(argv=None):
245 99
246 try: 100 try:
247 log.debug("Storing email parts in %s", outdir) 101 log.debug("Storing email parts in %s", outdir)
248 html_report = images = None
249 if args.html:
250 html_report, images = scrape_html_report(args.html, outdir,
251 args.phantomjs_args)
252
253 if args.to: 102 if args.to:
254 log.info("Sending email to %s", ', '.join(args.to)) 103 log.info("Sending email to %s", ', '.join(args.to))
255 if args.cc: 104 if args.cc:
256 log.info("Copying to %s", ', '.join(args.cc)) 105 log.info("Copying to %s", ', '.join(args.cc))
257 if args.bcc: 106 if args.bcc:
258 log.info("Blind copying to %s", ', '.join(args.bcc)) 107 log.info("Blind copying to %s", ', '.join(args.bcc))
259 send_email(args.text, html_report, images, args.subject, 108 send_email(args.text, args.subject, args.to, args.cc, args.bcc)
260 args.to, args.cc, args.bcc)
261 except subprocess.CalledProcessError as err: 109 except subprocess.CalledProcessError as err:
262 log.error("%s, with output:\n%s", str(err), err.output.decode()) 110 log.error("%s, with output:\n%s", str(err), err.output.decode())
263 return 1 111 return 1
264 except ReportError as err:
265 log.error(err)
266 return 1
267 finally: 112 finally:
268 if not args.outdir: 113 if not args.outdir:
269 log.debug("Wiping %s", outdir) 114 log.debug("Wiping %s", outdir)
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index 62c509f..dc417b4 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -8,7 +8,7 @@
8# - test suite 8# - test suite
9# - validate signed-off-by 9# - validate signed-off-by
10 10
11status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied") 11status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
12 12
13class Result: 13class Result:
14 # Whether the patch has an Upstream-Status or not 14 # Whether the patch has an Upstream-Status or not
@@ -46,7 +46,7 @@ def patchreview(path, patches):
46 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case 46 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
47 # insensitive. 47 # insensitive.
48 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) 48 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
49 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE) 49 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
50 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) 50 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
51 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) 51 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
52 52
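
The value group in status_re is widened from \w* to [\w-]* because the newly accepted "inactive-upstream" status contains a hyphen; a quick check of the difference:

import re

old_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE)
new_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)

header = "Upstream-Status: Inactive-Upstream\n"
print(old_re.search(header).group(2))   # Inactive
print(new_re.search(header).group(2))   # Inactive-Upstream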
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 23f238a..4012ac7 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then
97 exit 251 97 exit 251
98fi 98fi
99 99
100if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then 100if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended" 101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
102fi 102fi
103 103
diff --git a/scripts/crosstap b/scripts/crosstap
index 73c8947..5aa72f1 100755
--- a/scripts/crosstap
+++ b/scripts/crosstap
@@ -353,7 +353,7 @@ bitbake workspace.
353 353
354Anything after -- option is passed directly to stap. 354Anything after -- option is passed directly to stap.
355 355
356Legacy script invocation style supported but depreciated: 356Legacy script invocation style supported but deprecated:
357 %prog <user@hostname> <sytemtap-script> [systemtap options] 357 %prog <user@hostname> <sytemtap-script> [systemtap options]
358 358
359To enable most out of systemtap the following site.conf or local.conf 359To enable most out of systemtap the following site.conf or local.conf
diff --git a/scripts/git b/scripts/git
new file mode 100755
index 0000000..644055e
--- /dev/null
+++ b/scripts/git
@@ -0,0 +1,26 @@
1#!/usr/bin/env python3
2#
3# Wrapper around 'git' that doesn't think we are root
4
5import os
6import shutil
7import sys
8
9os.environ['PSEUDO_UNLOAD'] = '1'
10
11# calculate path to the real 'git'
12path = os.environ['PATH']
13# we need to remove our path but also any other copy of this script which
14# may be present, e.g. eSDK.
15replacements = [os.path.dirname(sys.argv[0])]
16for p in path.split(":"):
17 if p.endswith("/scripts"):
18 replacements.append(p)
19for r in replacements:
20 path = path.replace(r, '/ignoreme')
21real_git = shutil.which('git', path=path)
22
23if len(sys.argv) == 1:
24 os.execl(real_git, 'git')
25
26os.execv(real_git, sys.argv)
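
The wrapper's only real work is locating a 'git' binary that is not itself: it neutralises its own directory and every .../scripts entry on PATH (including copies shipped in an eSDK) before asking shutil.which(). A minimal sketch of that lookup, dropping the filtered entries instead of rewriting them to '/ignoreme' as the wrapper does:

import os
import shutil
import sys

path = os.environ.get('PATH', '')
# keep only PATH entries that cannot resolve back to this wrapper script
keep = [p for p in path.split(':')
        if p != os.path.dirname(sys.argv[0]) and not p.endswith('/scripts')]
real_git = shutil.which('git', path=':'.join(keep))
print(real_git)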
diff --git a/scripts/install-buildtools b/scripts/install-buildtools
index 8554a5d..10c3d04 100755
--- a/scripts/install-buildtools
+++ b/scripts/install-buildtools
@@ -57,9 +57,9 @@ logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
57 57
58DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') 58DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
59DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto' 59DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto'
60DEFAULT_RELEASE = 'yocto-3.2_M3' 60DEFAULT_RELEASE = 'yocto-3.4'
61DEFAULT_INSTALLER_VERSION = '3.1+snapshot' 61DEFAULT_INSTALLER_VERSION = '3.4'
62DEFAULT_BUILDDATE = '20200923' 62DEFAULT_BUILDDATE = '202110XX'
63 63
64# Python version sanity check 64# Python version sanity check
65if not (sys.version_info.major == 3 and sys.version_info.minor >= 4): 65if not (sys.version_info.major == 3 and sys.version_info.minor >= 4):
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index e69a10f..aa946f3 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -156,6 +156,27 @@ def _find_layer(depend, layers):
156 return layer 156 return layer
157 return None 157 return None
158 158
159def sanity_check_layers(layers, logger):
160 """
161 Check that we didn't find duplicate collection names, as the layer that will
162 be used is non-deterministic. The precise check is duplicate collections
163 with different patterns, as the same pattern being repeated won't cause
164 problems.
165 """
166 import collections
167
168 passed = True
169 seen = collections.defaultdict(set)
170 for layer in layers:
171 for name, data in layer.get("collections", {}).items():
172 seen[name].add(data["pattern"])
173
174 for name, patterns in seen.items():
175 if len(patterns) > 1:
176 passed = False
177 logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns)))
178 return passed
179
159def get_layer_dependencies(layer, layers, logger): 180def get_layer_dependencies(layer, layers, logger):
160 def recurse_dependencies(depends, layer, layers, logger, ret = []): 181 def recurse_dependencies(depends, layer, layers, logger, ret = []):
161 logger.debug('Processing dependencies %s for layer %s.' % \ 182 logger.debug('Processing dependencies %s for layer %s.' % \
@@ -261,7 +282,7 @@ def check_command(error_msg, cmd, cwd=None):
261 raise RuntimeError(msg) 282 raise RuntimeError(msg)
262 return output 283 return output
263 284
264def get_signatures(builddir, failsafe=False, machine=None): 285def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
265 import re 286 import re
266 287
267 # some recipes needs to be excluded like meta-world-pkgdata 288 # some recipes needs to be excluded like meta-world-pkgdata
@@ -272,7 +293,10 @@ def get_signatures(builddir, failsafe=False, machine=None):
272 sigs = {} 293 sigs = {}
273 tune2tasks = {} 294 tune2tasks = {}
274 295
275 cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" ' 296 cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
297 if extravars:
298 cmd += extravars
299 cmd += ' '
276 if machine: 300 if machine:
277 cmd += 'MACHINE=%s ' % machine 301 cmd += 'MACHINE=%s ' % machine
278 cmd += 'bitbake ' 302 cmd += 'bitbake '
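
sanity_check_layers() flags a collection name only when it is claimed by layers with differing patterns; exercised here on hypothetical layer metadata shaped like the dictionaries the function reads:

import collections

layers = [   # made-up layer metadata, matching the keys the function accesses
    {"name": "meta-a", "collections": {"demo": {"pattern": "^meta-a/"}}},
    {"name": "meta-b", "collections": {"demo": {"pattern": "^meta-b/"}}},
]
seen = collections.defaultdict(set)
for layer in layers:
    for name, data in layer.get("collections", {}).items():
        seen[name].add(data["pattern"])
for name, patterns in seen.items():
    if len(patterns) > 1:
        print("Collection %s found multiple times: %s" % (name, ", ".join(sorted(patterns))))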
diff --git a/scripts/lib/checklayer/cases/bsp.py b/scripts/lib/checklayer/cases/bsp.py
index 7fd56f5..a80a584 100644
--- a/scripts/lib/checklayer/cases/bsp.py
+++ b/scripts/lib/checklayer/cases/bsp.py
@@ -153,7 +153,7 @@ class BSPCheckLayer(OECheckLayerTestCase):
153 # do_build can be ignored: it is know to have 153 # do_build can be ignored: it is know to have
154 # different signatures in some cases, for example in 154 # different signatures in some cases, for example in
155 # the allarch ca-certificates due to RDEPENDS=openssl. 155 # the allarch ca-certificates due to RDEPENDS=openssl.
156 # That particular dependency is whitelisted via 156 # That particular dependency is marked via
157 # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up 157 # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
158 # in the sstate signature hash because filtering it 158 # in the sstate signature hash because filtering it
159 # out would be hard and running do_build multiple 159 # out would be hard and running do_build multiple
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index b82304e..491a139 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -6,6 +6,7 @@
6import glob 6import glob
7import os 7import os
8import unittest 8import unittest
9import re
9from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures 10from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
10from checklayer.case import OECheckLayerTestCase 11from checklayer.case import OECheckLayerTestCase
11 12
@@ -14,7 +15,7 @@ class CommonCheckLayer(OECheckLayerTestCase):
14 # The top-level README file may have a suffix (like README.rst or README.txt). 15 # The top-level README file may have a suffix (like README.rst or README.txt).
15 readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*')) 16 readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
16 self.assertTrue(len(readme_files) > 0, 17 self.assertTrue(len(readme_files) > 0,
17 msg="Layer doesn't contains README file.") 18 msg="Layer doesn't contain a README file.")
18 19
19 # There might be more than one file matching the file pattern above 20 # There might be more than one file matching the file pattern above
20 # (for example, README.rst and README-COPYING.rst). The one with the shortest 21 # (for example, README.rst and README-COPYING.rst). The one with the shortest
@@ -26,6 +27,16 @@ class CommonCheckLayer(OECheckLayerTestCase):
26 self.assertTrue(data, 27 self.assertTrue(data,
27 msg="Layer contains a README file but it is empty.") 28 msg="Layer contains a README file but it is empty.")
28 29
30 # If a layer's README references another README, then the checks below are not valid
31 if re.search('README', data, re.IGNORECASE):
32 return
33
34 self.assertIn('maintainer', data.lower())
35 self.assertIn('patch', data.lower())
36 # Check that there is an email address in the README
37 email_regex = re.compile(r"[^@]+@[^@]+")
38 self.assertTrue(email_regex.match(data))
39
29 def test_parse(self): 40 def test_parse(self):
30 check_command('Layer %s failed to parse.' % self.tc.layer['name'], 41 check_command('Layer %s failed to parse.' % self.tc.layer['name'],
31 'bitbake -p') 42 'bitbake -p')
@@ -43,6 +54,21 @@ class CommonCheckLayer(OECheckLayerTestCase):
43 ''' 54 '''
44 get_signatures(self.td['builddir'], failsafe=False) 55 get_signatures(self.td['builddir'], failsafe=False)
45 56
57 def test_world_inherit_class(self):
58 '''
59 This also does "bitbake -S none world" along with inheriting "yocto-check-layer"
60 class, which can do additional per-recipe test cases.
61 '''
62 msg = []
63 try:
64 get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"')
65 except RuntimeError as ex:
66 msg.append(str(ex))
67 if msg:
68 msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \
69 self.tc.layer['name'])
70 self.fail('\n'.join(msg))
71
46 def test_signatures(self): 72 def test_signatures(self):
47 if self.tc.layer['type'] == LayerType.SOFTWARE and \ 73 if self.tc.layer['type'] == LayerType.SOFTWARE and \
48 not self.tc.test_software_layer_signatures: 74 not self.tc.test_software_layer_signatures:
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index 8333225..e14a587 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -170,7 +170,7 @@ def deploy(args, config, basepath, workspace):
170 srcdir = recipe_outdir 170 srcdir = recipe_outdir
171 recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped') 171 recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped')
172 if os.path.isdir(recipe_outdir): 172 if os.path.isdir(recipe_outdir):
173 bb.utils.remove(recipe_outdir, True) 173 exec_fakeroot(rd, "rm -rf %s" % recipe_outdir, shell=True)
174 exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) 174 exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
175 os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) 175 os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
176 oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), 176 oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
index ae3fc4c..d717b6c 100644
--- a/scripts/lib/devtool/sdk.py
+++ b/scripts/lib/devtool/sdk.py
@@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace):
207 if not sstate_mirrors: 207 if not sstate_mirrors:
208 with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: 208 with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
209 f.write('SCONF_VERSION = "%s"\n' % site_conf_version) 209 f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
210 f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver) 210 f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
211 finally: 211 finally:
212 shutil.rmtree(tmpsdk_dir) 212 shutil.rmtree(tmpsdk_dir)
213 213
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index 826a3f9..0357ec0 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -192,14 +192,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
192 get_branch = [x.strip() for x in check_branch.splitlines()] 192 get_branch = [x.strip() for x in check_branch.splitlines()]
193 # Remove HEAD reference point and drop remote prefix 193 # Remove HEAD reference point and drop remote prefix
194 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 194 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
195 if 'master' in get_branch: 195 if len(get_branch) == 1:
196 # If it is master, we do not need to append 'branch=master' as this is default. 196 # If srcrev is on only ONE branch, then use that branch
197 # Even with the case where get_branch has multiple objects, if 'master' is one
198 # of them, we should default take from 'master'
199 srcbranch = ''
200 elif len(get_branch) == 1:
201 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
202 srcbranch = get_branch[0] 197 srcbranch = get_branch[0]
198 elif 'main' in get_branch:
199 # If srcrev is on multiple branches, then choose 'main' if it is one of them
200 srcbranch = 'main'
201 elif 'master' in get_branch:
202 # Otherwise choose 'master' if it is one of the branches
203 srcbranch = 'master'
203 else: 204 else:
204 # If get_branch contains more than one objects, then display error and exit. 205 # If get_branch contains more than one objects, then display error and exit.
205 mbrch = '\n ' + '\n '.join(get_branch) 206 mbrch = '\n ' + '\n '.join(get_branch)
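
The new branch-selection order in _extract_new_source() can be summarised by this hypothetical helper: a revision found on exactly one branch uses that branch, otherwise 'main' is preferred, then 'master', and anything else is an error:

def pick_srcbranch(get_branch):
    if len(get_branch) == 1:
        return get_branch[0]
    if 'main' in get_branch:
        return 'main'
    if 'master' in get_branch:
        return 'master'
    raise ValueError("srcrev is on multiple branches: %s" % ", ".join(get_branch))

print(pick_srcbranch(['devel']))           # devel
print(pick_srcbranch(['main', 'master']))  # main
print(pick_srcbranch(['master', 'v1.x']))  # master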
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index b6c4564..824ac63 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -366,7 +366,7 @@ def supports_srcrev(uri):
366def reformat_git_uri(uri): 366def reformat_git_uri(uri):
367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]''' 367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
368 checkuri = uri.split(';', 1)[0] 368 checkuri = uri.split(';', 1)[0]
369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://github.com/[^/]+/[^/]+/?$', checkuri): 369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri):
370 # Appends scheme if the scheme is missing 370 # Appends scheme if the scheme is missing
371 if not '://' in uri: 371 if not '://' in uri:
372 uri = 'git://' + uri 372 uri = 'git://' + uri
@@ -389,9 +389,6 @@ def reformat_git_uri(uri):
389 parms.update({('protocol', 'ssh')}) 389 parms.update({('protocol', 'ssh')})
390 elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms): 390 elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms):
391 parms.update({('protocol', scheme)}) 391 parms.update({('protocol', scheme)})
392 # We assume 'master' branch if not set
393 if not 'branch' in parms:
394 parms.update({('branch', 'master')})
395 # Always append 'git://' 392 # Always append 'git://'
396 fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms)) 393 fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
397 return fUrl 394 return fUrl
@@ -438,7 +435,7 @@ def create_recipe(args):
438 if args.binary: 435 if args.binary:
439 # Assume the archive contains the directory structure verbatim 436 # Assume the archive contains the directory structure verbatim
440 # so we need to extract to a subdirectory 437 # so we need to extract to a subdirectory
441 fetchuri += ';subdir=${BP}' 438 fetchuri += ';subdir=${BPN}'
442 srcuri = fetchuri 439 srcuri = fetchuri
443 rev_re = re.compile(';rev=([^;]+)') 440 rev_re = re.compile(';rev=([^;]+)')
444 res = rev_re.search(srcuri) 441 res = rev_re.search(srcuri)
@@ -481,6 +478,9 @@ def create_recipe(args):
481 storeTagName = params['tag'] 478 storeTagName = params['tag']
482 params['nobranch'] = '1' 479 params['nobranch'] = '1'
483 del params['tag'] 480 del params['tag']
481 # Assume 'master' branch if not set
482 if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
483 params['branch'] = 'master'
484 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) 484 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
485 485
486 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') 486 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
@@ -530,10 +530,9 @@ def create_recipe(args):
530 # Remove HEAD reference point and drop remote prefix 530 # Remove HEAD reference point and drop remote prefix
531 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 531 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
532 if 'master' in get_branch: 532 if 'master' in get_branch:
533 # If it is master, we do not need to append 'branch=master' as this is default.
534 # Even with the case where get_branch has multiple objects, if 'master' is one 533 # Even with the case where get_branch has multiple objects, if 'master' is one
535 # of them, we should default take from 'master' 534 # of them, we should default take from 'master'
536 srcbranch = '' 535 srcbranch = 'master'
537 elif len(get_branch) == 1: 536 elif len(get_branch) == 1:
538 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' 537 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
539 srcbranch = get_branch[0] 538 srcbranch = get_branch[0]
@@ -546,8 +545,8 @@ def create_recipe(args):
546 # Since we might have a value in srcbranch, we need to 545 # Since we might have a value in srcbranch, we need to
547 # recontruct the srcuri to include 'branch' in params. 546 # recontruct the srcuri to include 'branch' in params.
548 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) 547 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
549 if srcbranch: 548 if scheme in ['git', 'gitsm']:
550 params['branch'] = srcbranch 549 params['branch'] = srcbranch or 'master'
551 550
552 if storeTagName and scheme in ['git', 'gitsm']: 551 if storeTagName and scheme in ['git', 'gitsm']:
553 # Check srcrev using tag and check validity of the tag 552 # Check srcrev using tag and check validity of the tag
@@ -606,7 +605,7 @@ def create_recipe(args):
606 splitline = line.split() 605 splitline = line.split()
607 if len(splitline) > 1: 606 if len(splitline) > 1:
608 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 607 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
609 srcuri = reformat_git_uri(splitline[1]) 608 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
610 srcsubdir = 'git' 609 srcsubdir = 'git'
611 break 610 break
612 611
@@ -920,6 +919,22 @@ def split_value(value):
920 else: 919 else:
921 return value 920 return value
922 921
922def fixup_license(value):
923 # Ensure licenses with OR starts and ends with brackets
924 if '|' in value:
925 return '(' + value + ')'
926 return value
927
928def tidy_licenses(value):
929 """Flat, split and sort licenses"""
930 from oe.license import flattened_licenses
931 def _choose(a, b):
932 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
933 return ["(%s | %s)" % (str_a, str_b)]
934 if not isinstance(value, str):
935 value = " & ".join(value)
936 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
937
923def handle_license_vars(srctree, lines_before, handled, extravalues, d): 938def handle_license_vars(srctree, lines_before, handled, extravalues, d):
924 lichandled = [x for x in handled if x[0] == 'license'] 939 lichandled = [x for x in handled if x[0] == 'license']
925 if lichandled: 940 if lichandled:
@@ -933,10 +948,13 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
933 lines = [] 948 lines = []
934 if licvalues: 949 if licvalues:
935 for licvalue in licvalues: 950 for licvalue in licvalues:
936 if not licvalue[0] in licenses: 951 license = licvalue[0]
937 licenses.append(licvalue[0]) 952 lics = tidy_licenses(fixup_license(license))
953 lics = [lic for lic in lics if lic not in licenses]
954 if len(lics):
955 licenses.extend(lics)
938 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) 956 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
939 if licvalue[0] == 'Unknown': 957 if license == 'Unknown':
940 lic_unknown.append(licvalue[1]) 958 lic_unknown.append(licvalue[1])
941 if lic_unknown: 959 if lic_unknown:
942 lines.append('#') 960 lines.append('#')
@@ -945,9 +963,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
945 for licfile in lic_unknown: 963 for licfile in lic_unknown:
946 lines.append('# %s' % licfile) 964 lines.append('# %s' % licfile)
947 965
948 extra_license = split_value(extravalues.pop('LICENSE', [])) 966 extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
949 if '&' in extra_license:
950 extra_license.remove('&')
951 if extra_license: 967 if extra_license:
952 if licenses == ['Unknown']: 968 if licenses == ['Unknown']:
953 licenses = extra_license 969 licenses = extra_license
@@ -988,7 +1004,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
988 lines.append('# instead of &. If there is any doubt, check the accompanying documentation') 1004 lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
989 lines.append('# to determine which situation is applicable.') 1005 lines.append('# to determine which situation is applicable.')
990 1006
991 lines.append('LICENSE = "%s"' % ' & '.join(licenses)) 1007 lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
992 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) 1008 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
993 lines.append('') 1009 lines.append('')
994 1010
@@ -1064,7 +1080,7 @@ def crunch_license(licfile):
1064 1080
1065 # common licenses 1081 # common licenses
1066 crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0' 1082 crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0'
1067 crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = 'BSD-0-Clause' 1083 crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = '0BSD'
1068 crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause' 1084 crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause'
1069 crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause' 1085 crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause'
1070 crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0' 1086 crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0'
@@ -1092,15 +1108,15 @@ def crunch_license(licfile):
1092 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt 1108 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1093 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' 1109 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1094 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE 1110 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1095 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPLv2' 1111 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
1096 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt 1112 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1097 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPLv2' 1113 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
1098 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 1114 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1099 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPLv2.1' 1115 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
1100 # unixODBC-2.3.4 COPYING 1116 # unixODBC-2.3.4 COPYING
1101 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPLv2.1' 1117 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
1102 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 1118 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1103 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPLv3' 1119 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
1104 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 1120 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1105 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' 1121 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1106 1122
@@ -1202,7 +1218,7 @@ def guess_license(srctree, d):
1202 fullpath = os.path.join(root, fn) 1218 fullpath = os.path.join(root, fn)
1203 if not fullpath in licfiles: 1219 if not fullpath in licfiles:
1204 licfiles.append(fullpath) 1220 licfiles.append(fullpath)
1205 for licfile in licfiles: 1221 for licfile in sorted(licfiles):
1206 md5value = bb.utils.md5_file(licfile) 1222 md5value = bb.utils.md5_file(licfile)
1207 license = md5sums.get(md5value, None) 1223 license = md5sums.get(md5value, None)
1208 if not license: 1224 if not license:
@@ -1219,7 +1235,7 @@ def guess_license(srctree, d):
1219 1235
1220 return licenses 1236 return licenses
1221 1237
1222def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='${PN}'): 1238def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1223 """ 1239 """
1224 Given a list of (license, path, md5sum) as returned by guess_license(), 1240 Given a list of (license, path, md5sum) as returned by guess_license(),
1225 a dict of package name to path mappings, write out a set of 1241 a dict of package name to path mappings, write out a set of
@@ -1227,6 +1243,7 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='
1227 """ 1243 """
1228 pkglicenses = {pn: []} 1244 pkglicenses = {pn: []}
1229 for license, licpath, _ in licvalues: 1245 for license, licpath, _ in licvalues:
1246 license = fixup_license(license)
1230 for pkgname, pkgpath in packages.items(): 1247 for pkgname, pkgpath in packages.items():
1231 if licpath.startswith(pkgpath + '/'): 1248 if licpath.startswith(pkgpath + '/'):
1232 if pkgname in pkglicenses: 1249 if pkgname in pkglicenses:
@@ -1239,11 +1256,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='
1239 pkglicenses[pn].append(license) 1256 pkglicenses[pn].append(license)
1240 outlicenses = {} 1257 outlicenses = {}
1241 for pkgname in packages: 1258 for pkgname in packages:
1242 license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' 1259 # Assume AND operator between license files
1243 if license == 'Unknown' and pkgname in fallback_licenses: 1260 license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
1261 if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
1244 license = fallback_licenses[pkgname] 1262 license = fallback_licenses[pkgname]
1263 licenses = tidy_licenses(license)
1264 license = ' & '.join(licenses)
1245 outlines.append('LICENSE:%s = "%s"' % (pkgname, license)) 1265 outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
1246 outlicenses[pkgname] = license.split() 1266 outlicenses[pkgname] = licenses
1247 return outlicenses 1267 return outlicenses
1248 1268
1249def read_pkgconfig_provides(d): 1269def read_pkgconfig_provides(d):
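
fixup_license() and tidy_licenses() above exist so that per-file licenses can be joined with '&' without changing their meaning: any OR-expression is bracketed first, then the combined string is flattened, de-duplicated and sorted. A small sketch of the bracketing step alone (the sample values are made up, and the flattening done by oe.license is omitted):

def fixup_license(value):
    # wrap OR-expressions so they stay grouped once joined with ' & '
    if '|' in value:
        return '(' + value + ')'
    return value

files = ['MIT', 'GPL-2.0-only | MIT', 'MIT']
print(' & '.join(sorted(set(fixup_license(v) for v in files), key=str.casefold)))
# (GPL-2.0-only | MIT) & MIT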
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 35a97c9..5015634 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -545,7 +545,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
545 deps.append('zlib') 545 deps.append('zlib')
546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'): 546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
547 deps.append('openssl') 547 deps.append('openssl')
548 elif keyword == 'AX_LIB_CURL': 548 elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
549 deps.append('curl') 549 deps.append('curl')
550 elif keyword == 'AX_LIB_BEECRYPT': 550 elif keyword == 'AX_LIB_BEECRYPT':
551 deps.append('beecrypt') 551 deps.append('beecrypt')
@@ -624,6 +624,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
624 'AX_CHECK_OPENSSL', 624 'AX_CHECK_OPENSSL',
625 'AX_LIB_CRYPTO', 625 'AX_LIB_CRYPTO',
626 'AX_LIB_CURL', 626 'AX_LIB_CURL',
627 'LIBCURL_CHECK_CONFIG',
627 'AX_LIB_BEECRYPT', 628 'AX_LIB_BEECRYPT',
628 'AX_LIB_EXPAT', 629 'AX_LIB_EXPAT',
629 'AX_LIB_GCRYPT', 630 'AX_LIB_GCRYPT',
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index 0b6b042..f4f51c8 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -8,7 +8,7 @@
8import ast 8import ast
9import codecs 9import codecs
10import collections 10import collections
11import distutils.command.build_py 11import setuptools.command.build_py
12import email 12import email
13import imp 13import imp
14import glob 14import glob
@@ -102,29 +102,38 @@ class PythonRecipeHandler(RecipeHandler):
102 'License :: OSI Approved :: Artistic License': 'Artistic', 102 'License :: OSI Approved :: Artistic License': 'Artistic',
103 'License :: OSI Approved :: Attribution Assurance License': 'AAL', 103 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
104 'License :: OSI Approved :: BSD License': 'BSD-3-Clause', 104 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
105 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
106 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
107 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
105 'License :: OSI Approved :: Common Public License': 'CPL', 108 'License :: OSI Approved :: Common Public License': 'CPL',
109 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
110 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
106 'License :: OSI Approved :: Eiffel Forum License': 'EFL', 111 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
107 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0', 112 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
108 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1', 113 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
109 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0+', 114 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
110 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0', 115 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
116 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
111 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL', 117 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
112 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL', 118 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
113 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0', 119 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
114 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0+', 120 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
115 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0', 121 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
116 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0+', 122 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
117 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0', 123 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
118 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0+', 124 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
119 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0', 125 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
120 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0+', 126 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
121 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL', 127 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
128 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
122 'License :: OSI Approved :: IBM Public License': 'IPL', 129 'License :: OSI Approved :: IBM Public License': 'IPL',
123 'License :: OSI Approved :: ISC License (ISCL)': 'ISC', 130 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
124 'License :: OSI Approved :: Intel Open Source License': 'Intel', 131 'License :: OSI Approved :: Intel Open Source License': 'Intel',
125 'License :: OSI Approved :: Jabber Open Source License': 'Jabber', 132 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
126 'License :: OSI Approved :: MIT License': 'MIT', 133 'License :: OSI Approved :: MIT License': 'MIT',
134 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
127 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL', 135 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
136 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
128 'License :: OSI Approved :: Motosoto License': 'Motosoto', 137 'License :: OSI Approved :: Motosoto License': 'Motosoto',
129 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0', 138 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
130 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1', 139 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
@@ -132,19 +141,26 @@ class PythonRecipeHandler(RecipeHandler):
132 'License :: OSI Approved :: Nethack General Public License': 'NGPL', 141 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
133 'License :: OSI Approved :: Nokia Open Source License': 'Nokia', 142 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
134 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL', 143 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
144 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
145 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
135 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python', 146 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
136 'License :: OSI Approved :: Python Software Foundation License': 'PSF', 147 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
137 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL', 148 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
138 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL', 149 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
150 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
139 'License :: OSI Approved :: Sleepycat License': 'Sleepycat', 151 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
140 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': '-- Sun Industry Standards Source License (SISSL)', 152 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
141 'License :: OSI Approved :: Sun Public License': 'SPL', 153 'License :: OSI Approved :: Sun Public License': 'SPL',
154 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
155 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
142 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA', 156 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
143 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0', 157 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
144 'License :: OSI Approved :: W3C License': 'W3C', 158 'License :: OSI Approved :: W3C License': 'W3C',
145 'License :: OSI Approved :: X.Net License': 'Xnet', 159 'License :: OSI Approved :: X.Net License': 'Xnet',
146 'License :: OSI Approved :: Zope Public License': 'ZPL', 160 'License :: OSI Approved :: Zope Public License': 'ZPL',
147 'License :: OSI Approved :: zlib/libpng License': 'Zlib', 161 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
162 'License :: Other/Proprietary License': 'Proprietary',
163 'License :: Public Domain': 'PD',
148 } 164 }
149 165
150 def __init__(self): 166 def __init__(self):
@@ -459,9 +475,13 @@ class PythonRecipeHandler(RecipeHandler):
459 else: 475 else:
460 package_dir = {} 476 package_dir = {}
461 477
462 class PackageDir(distutils.command.build_py.build_py): 478 dist = setuptools.Distribution()
479
480 class PackageDir(setuptools.command.build_py.build_py):
463 def __init__(self, package_dir): 481 def __init__(self, package_dir):
464 self.package_dir = package_dir 482 self.package_dir = package_dir
483 self.dist = dist
484 super().__init__(self.dist)
465 485
466 pd = PackageDir(package_dir) 486 pd = PackageDir(package_dir)
467 to_scan = [] 487 to_scan = []
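The classifier table above now emits SPDX identifiers (GPL-2.0-only, AGPL-3.0-or-later, and so on) rather than the legacy short names. A hedged sketch of how such a table is typically consumed, with a made-up classifier list:

    # Hypothetical subset of the trove-classifier -> SPDX mapping shown above.
    classifier_license_map = {
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
        'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
        'License :: OSI Approved :: MIT License': 'MIT',
        'License :: Public Domain': 'PD',
    }

    def licenses_from_classifiers(classifiers):
        return [classifier_license_map[c] for c in classifiers
                if c in classifier_license_map]

    print(licenses_from_classifiers([
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
    ]))
    # -> ['MIT']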
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
index 0d3fb06..8085111 100644
--- a/scripts/lib/recipetool/licenses.csv
+++ b/scripts/lib/recipetool/licenses.csv
@@ -1,37 +1,37 @@
10636e73ff0215e8d672dc4c32c317bb3,GPLv2 10636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPLv2 212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
318810669f13b87348459e611d31ab760,GPLv2 318810669f13b87348459e611d31ab760,GPL-2.0-only
4252890d9eee26aab7b432e8b8a616475,LGPLv2 4252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
52d5025d4aa3495befef8f17206a5b0a1,LGPLv2.1 52d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
63214f080875748938ba060314b4f727d,LGPLv2 63214f080875748938ba060314b4f727d,LGPL-2.0-only
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0 7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
8393a5ca445f6965873eca0259a17f833,GPLv2 8393a5ca445f6965873eca0259a17f833,GPL-2.0-only
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0 93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPLv2 103bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
114325afd396febcb659c36b49533135d4,GPLv2 114325afd396febcb659c36b49533135d4,GPL-2.0-only
124fbd65380cdd255951079008b364516c,LGPLv2.1 124fbd65380cdd255951079008b364516c,LGPL-2.1-only
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause 1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPLv2 1455ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
1559530bdf33659b29e73d4adb9f9f6552,GPLv2 1559530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
165f30f0716dfdd0d91eb439ebec522ec2,LGPLv2 165f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
176a6a8e020838b23406c81b19c1d46df6,LGPLv3 176a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
18751419260aa954499f7abaabaa882bbe,GPLv2 18751419260aa954499f7abaabaa882bbe,GPL-2.0-only
197fbc338309ac38fefcd64b04bb903e34,LGPLv2.1 197fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
208ca43cbc842c2336e835926c2166c28b,GPLv2 208ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
2194d55d512a9ba36caa9b7df079bae19f,GPLv2 2194d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
229ac2e7cff1ddaf48b6eab6028f23ef88,GPLv2 229ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
239f604d8a4f8e74f4f5140845a21b6674,LGPLv2 239f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
24a6f89e2100d9b6cdffcea4f398e37343,LGPLv2.1 24a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
25b234ee4d69f5fce4486a80fdaf4a4263,GPLv2 25b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
26bbb461211a33b134d42ed5ee802b37ff,LGPLv2.1 26bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1 27bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
28c93c0550bd3173f4504b2cbd8991e50b,GPLv2 28c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
29d32239bcb673463ab874e80d47fae504,GPLv3 29d32239bcb673463ab874e80d47fae504,GPL-3.0-only
30d7810fab7487fb0aad327b76f1be7cd7,GPLv2 30d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
31d8045f3b8f929c1cb29a1e3fd737b499,LGPLv2.1 31d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
32db979804f025cf55aabec7129cb671ed,LGPLv2 32db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
33eb723b61539feef013de476e68b5c50a,GPLv2 33eb723b61539feef013de476e68b5c50a,GPL-2.0-only
34ebb5c50ab7cab4baeffba14977030c07,GPLv2 34ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
35f27defe1e96c2e1ecd4e0c9be8967949,GPLv3 35f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
36fad9b3332be894bab9bc501572864b29,LGPLv2.1 36fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
37fbc093901857fcd118f065f900982c24,LGPLv2.1 37fbc093901857fcd118f065f900982c24,LGPL-2.1-only
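Each row above maps the md5sum of a known license file to its (now SPDX) name. A minimal sketch of such a lookup, assuming a two-column md5,license CSV; the real recipetool code additionally falls back to a "crunched" checksum when the exact digest is unknown:

    import csv
    import hashlib

    def load_license_map(csv_path):
        # licenses.csv rows are "md5sum,SPDX-identifier"
        with open(csv_path, newline='') as f:
            return {md5: spdx for md5, spdx in csv.reader(f)}

    def guess_license(license_file, license_map):
        with open(license_file, 'rb') as f:
            digest = hashlib.md5(f.read()).hexdigest()
        return license_map.get(digest, 'Unknown')

    # license_map = load_license_map('scripts/lib/recipetool/licenses.csv')
    # print(guess_license('COPYING', license_map))   # e.g. 'GPL-2.0-only'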
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index 3164171..adf8147 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -18,7 +18,8 @@ import sys
18import tempfile 18import tempfile
19import threading 19import threading
20import importlib 20import importlib
21from importlib import machinery 21import importlib.machinery
22import importlib.util
22 23
23class KeepAliveStreamHandler(logging.StreamHandler): 24class KeepAliveStreamHandler(logging.StreamHandler):
24 def __init__(self, keepalive=True, **kwargs): 25 def __init__(self, keepalive=True, **kwargs):
@@ -82,7 +83,9 @@ def load_plugins(logger, plugins, pluginpath):
82 logger.debug('Loading plugin %s' % name) 83 logger.debug('Loading plugin %s' % name)
83 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 84 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
84 if spec: 85 if spec:
85 return spec.loader.load_module() 86 mod = importlib.util.module_from_spec(spec)
87 spec.loader.exec_module(mod)
88 return mod
86 89
87 def plugin_name(filename): 90 def plugin_name(filename):
88 return os.path.splitext(os.path.basename(filename))[0] 91 return os.path.splitext(os.path.basename(filename))[0]
@@ -176,6 +179,7 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
176 f.write('BB_STRICT_CHECKSUM = "ignore"\n') 179 f.write('BB_STRICT_CHECKSUM = "ignore"\n')
177 f.write('SRC_URI = "%s"\n' % srcuri) 180 f.write('SRC_URI = "%s"\n' % srcuri)
178 f.write('SRCREV = "%s"\n' % srcrev) 181 f.write('SRCREV = "%s"\n' % srcrev)
182 f.write('PV = "0.0+${SRCPV}"\n')
179 f.write('WORKDIR = "%s"\n' % tmpworkdir) 183 f.write('WORKDIR = "%s"\n' % tmpworkdir)
180 # Set S out of the way so it doesn't get created under the workdir 184 # Set S out of the way so it doesn't get created under the workdir
181 f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) 185 f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
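The load_plugins() change replaces the deprecated loader.load_module() with the spec/exec_module API. A self-contained version of the same pattern, with a hypothetical plugin name and directory:

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        # Locate the module on the given search path, then create and execute it.
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec is None:
            return None
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod

    # mod = load_plugin('myplugin', '/path/to/plugins')   # hypothetical values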
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
index 4ff7470..73e3380 100644
--- a/scripts/lib/wic/help.py
+++ b/scripts/lib/wic/help.py
@@ -940,6 +940,12 @@ DESCRIPTION
940 quotes. If not specified, the default string is 940 quotes. If not specified, the default string is
941 "defaults". 941 "defaults".
942 942
943 --fspassno: Specifies the order in which filesystem checks are done
944 at boot time by fsck. See fs_passno parameter of
945 fstab(5). This parameter will be copied into the
946 /etc/fstab file of the installed system. If not
947 specified the default value of "0" will be used.
948
943 --label label: Specifies the label to give to the filesystem 949 --label label: Specifies the label to give to the filesystem
944 to be made on the partition. If the given 950 to be made on the partition. If the given
945 label is already in use by another filesystem, 951 label is already in use by another filesystem,
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py
index 0df9eb0..a49b7b9 100644
--- a/scripts/lib/wic/ksparser.py
+++ b/scripts/lib/wic/ksparser.py
@@ -155,6 +155,7 @@ class KickStart():
155 part.add_argument('--change-directory') 155 part.add_argument('--change-directory')
156 part.add_argument("--extra-space", type=sizetype("M")) 156 part.add_argument("--extra-space", type=sizetype("M"))
157 part.add_argument('--fsoptions', dest='fsopts') 157 part.add_argument('--fsoptions', dest='fsopts')
158 part.add_argument('--fspassno', dest='fspassno')
158 part.add_argument('--fstype', default='vfat', 159 part.add_argument('--fstype', default='vfat',
159 choices=('ext2', 'ext3', 'ext4', 'btrfs', 160 choices=('ext2', 'ext3', 'ext4', 'btrfs',
160 'squashfs', 'vfat', 'msdos', 'erofs', 161 'squashfs', 'vfat', 'msdos', 'erofs',
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py
index 3e11822..3bc165f 100644
--- a/scripts/lib/wic/misc.py
+++ b/scripts/lib/wic/misc.py
@@ -36,6 +36,7 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools",
36 "mkdosfs": "dosfstools", 36 "mkdosfs": "dosfstools",
37 "mkisofs": "cdrtools", 37 "mkisofs": "cdrtools",
38 "mkfs.btrfs": "btrfs-tools", 38 "mkfs.btrfs": "btrfs-tools",
39 "mkfs.erofs": "erofs-utils",
39 "mkfs.ext2": "e2fsprogs", 40 "mkfs.ext2": "e2fsprogs",
40 "mkfs.ext3": "e2fsprogs", 41 "mkfs.ext3": "e2fsprogs",
41 "mkfs.ext4": "e2fsprogs", 42 "mkfs.ext4": "e2fsprogs",
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
index a258340..e50871b8 100644
--- a/scripts/lib/wic/partition.py
+++ b/scripts/lib/wic/partition.py
@@ -33,6 +33,7 @@ class Partition():
33 self.include_path = args.include_path 33 self.include_path = args.include_path
34 self.change_directory = args.change_directory 34 self.change_directory = args.change_directory
35 self.fsopts = args.fsopts 35 self.fsopts = args.fsopts
36 self.fspassno = args.fspassno
36 self.fstype = args.fstype 37 self.fstype = args.fstype
37 self.label = args.label 38 self.label = args.label
38 self.use_label = args.use_label 39 self.use_label = args.use_label
@@ -171,7 +172,7 @@ class Partition():
171 # Split sourceparams string of the form key1=val1[,key2=val2,...] 172 # Split sourceparams string of the form key1=val1[,key2=val2,...]
172 # into a dict. Also accepts valueless keys i.e. without = 173 # into a dict. Also accepts valueless keys i.e. without =
173 splitted = self.sourceparams.split(',') 174 splitted = self.sourceparams.split(',')
174 srcparams_dict = dict(par.split('=', 1) for par in splitted if par) 175 srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
175 176
176 plugin = PluginMgr.get_plugins('source')[self.source] 177 plugin = PluginMgr.get_plugins('source')[self.source]
177 plugin.do_configure_partition(self, srcparams_dict, creator, 178 plugin.do_configure_partition(self, srcparams_dict, creator,
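The sourceparams change allows valueless keys (such as rawcopy's "unpack" below) to parse into the dict with a None value instead of raising ValueError. A quick check with an example string:

    sourceparams = "file=core-image-minimal.ext4.gz,unpack,skip=512"
    splitted = sourceparams.split(',')
    # Pad each key[=value] pair to two elements so valueless keys map to None
    srcparams_dict = dict((par.split('=', 1) + [None])[:2] for par in splitted if par)
    print(srcparams_dict)
    # -> {'file': 'core-image-minimal.ext4.gz', 'unpack': None, 'skip': '512'}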
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
index d9b4e57..b645683 100644
--- a/scripts/lib/wic/pluginbase.py
+++ b/scripts/lib/wic/pluginbase.py
@@ -9,9 +9,11 @@ __all__ = ['ImagerPlugin', 'SourcePlugin']
9 9
10import os 10import os
11import logging 11import logging
12import types
12 13
13from collections import defaultdict 14from collections import defaultdict
14from importlib.machinery import SourceFileLoader 15import importlib
16import importlib.util
15 17
16from wic import WicError 18from wic import WicError
17from wic.misc import get_bitbake_var 19from wic.misc import get_bitbake_var
@@ -54,7 +56,9 @@ class PluginMgr:
54 mname = fname[:-3] 56 mname = fname[:-3]
55 mpath = os.path.join(ppath, fname) 57 mpath = os.path.join(ppath, fname)
56 logger.debug("loading plugin module %s", mpath) 58 logger.debug("loading plugin module %s", mpath)
57 SourceFileLoader(mname, mpath).load_module() 59 spec = importlib.util.spec_from_file_location(mname, mpath)
60 module = importlib.util.module_from_spec(spec)
61 spec.loader.exec_module(module)
58 62
59 return PLUGINS.get(ptype) 63 return PLUGINS.get(ptype)
60 64
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
index 35fff7c..da483da 100644
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ b/scripts/lib/wic/plugins/imager/direct.py
@@ -138,8 +138,9 @@ class DirectPlugin(ImagerPlugin):
138 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum) 138 device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum)
139 139
140 opts = part.fsopts if part.fsopts else "defaults" 140 opts = part.fsopts if part.fsopts else "defaults"
141 passno = part.fspassno if part.fspassno else "0"
141 line = "\t".join([device_name, part.mountpoint, part.fstype, 142 line = "\t".join([device_name, part.mountpoint, part.fstype,
142 opts, "0", "0"]) + "\n" 143 opts, "0", passno]) + "\n"
143 144
144 fstab_lines.append(line) 145 fstab_lines.append(line)
145 updated = True 146 updated = True
@@ -259,7 +260,7 @@ class DirectPlugin(ImagerPlugin):
259 if part.mountpoint == "/": 260 if part.mountpoint == "/":
260 if part.uuid: 261 if part.uuid:
261 return "PARTUUID=%s" % part.uuid 262 return "PARTUUID=%s" % part.uuid
262 elif part.label: 263 elif part.label and self.ptable_format != 'msdos':
263 return "PARTLABEL=%s" % part.label 264 return "PARTLABEL=%s" % part.label
264 else: 265 else:
265 suffix = 'p' if part.disk.startswith('mmcblk') else '' 266 suffix = 'p' if part.disk.startswith('mmcblk') else ''
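With --fspassno, the generated /etc/fstab entry carries the requested fs_passno value in its sixth field instead of a hard-coded "0"; a .wks line such as "part / --source rootfs --fstype=ext4 --fspassno 1" would feed it. A small sketch with example values, mirroring the join above:

    # Example values only; the imager reads these from the parsed partition.
    device_name, mountpoint, fstype = "/dev/sda2", "/", "ext4"
    fsopts, fspassno = None, "1"

    opts = fsopts if fsopts else "defaults"
    passno = fspassno if fspassno else "0"
    line = "\t".join([device_name, mountpoint, fstype, opts, "0", passno]) + "\n"
    print(line, end="")
    # -> /dev/sda2    /    ext4    defaults    0    1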
diff --git a/scripts/lib/wic/plugins/source/rawcopy.py b/scripts/lib/wic/plugins/source/rawcopy.py
index fa7b1eb..7c90cd3 100644
--- a/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/scripts/lib/wic/plugins/source/rawcopy.py
@@ -4,6 +4,8 @@
4 4
5import logging 5import logging
6import os 6import os
7import signal
8import subprocess
7 9
8from wic import WicError 10from wic import WicError
9from wic.pluginbase import SourcePlugin 11from wic.pluginbase import SourcePlugin
@@ -38,6 +40,25 @@ class RawCopyPlugin(SourcePlugin):
38 40
39 exec_cmd(cmd) 41 exec_cmd(cmd)
40 42
43 @staticmethod
44 def do_image_uncompression(src, dst, workdir):
45 def subprocess_setup():
46 # Python installs a SIGPIPE handler by default. This is usually not what
47 # non-Python subprocesses expect.
48 # SIGPIPE errors are known issues with gzip/bash
49 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
50
51 extension = os.path.splitext(src)[1]
52 decompressor = {
53 ".bz2": "bzip2",
54 ".gz": "gzip",
55 ".xz": "xz"
56 }.get(extension)
57 if not decompressor:
58 raise WicError("Not supported compressor filename extension: %s" % extension)
59 cmd = "%s -dc %s > %s" % (decompressor, src, dst)
60 subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=workdir)
61
41 @classmethod 62 @classmethod
42 def do_prepare_partition(cls, part, source_params, cr, cr_workdir, 63 def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
43 oe_builddir, bootimg_dir, kernel_dir, 64 oe_builddir, bootimg_dir, kernel_dir,
@@ -56,7 +77,13 @@ class RawCopyPlugin(SourcePlugin):
56 if 'file' not in source_params: 77 if 'file' not in source_params:
57 raise WicError("No file specified") 78 raise WicError("No file specified")
58 79
59 src = os.path.join(kernel_dir, source_params['file']) 80 if 'unpack' in source_params:
81 img = os.path.join(kernel_dir, source_params['file'])
82 src = os.path.join(cr_workdir, os.path.splitext(source_params['file'])[0])
83 RawCopyPlugin.do_image_uncompression(img, src, cr_workdir)
84 else:
85 src = os.path.join(kernel_dir, source_params['file'])
86
60 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno)) 87 dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno))
61 88
62 if not os.path.exists(os.path.dirname(dst)): 89 if not os.path.exists(os.path.dirname(dst)):
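The new "unpack" source parameter (for example --sourceparams="file=core-image-minimal.ext4.gz,unpack" in a .wks file) decompresses the image before copying it. A sketch of the extension-based decompressor selection, using a hypothetical image name:

    import os

    def decompress_cmd(src):
        extension = os.path.splitext(src)[1]
        decompressor = {".bz2": "bzip2", ".gz": "gzip", ".xz": "xz"}.get(extension)
        if not decompressor:
            raise ValueError("Unsupported compressed image extension: %s" % extension)
        # Write the uncompressed data next to the source, minus the extension
        return "%s -dc %s > %s" % (decompressor, src, os.path.splitext(src)[0])

    print(decompress_cmd("core-image-minimal.ext4.gz"))
    # -> gzip -dc core-image-minimal.ext4.gz > core-image-minimal.ext4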
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
index 2e34e71..25bb41d 100644
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ b/scripts/lib/wic/plugins/source/rootfs.py
@@ -50,7 +50,7 @@ class RootfsPlugin(SourcePlugin):
50 50
51 @staticmethod 51 @staticmethod
52 def __get_rootfs_dir(rootfs_dir): 52 def __get_rootfs_dir(rootfs_dir):
53 if os.path.isdir(rootfs_dir): 53 if rootfs_dir and os.path.isdir(rootfs_dir):
54 return os.path.realpath(rootfs_dir) 54 return os.path.realpath(rootfs_dir)
55 55
56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) 56 image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir)
@@ -97,6 +97,9 @@ class RootfsPlugin(SourcePlugin):
97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab")) 97 part.has_fstab = os.path.exists(os.path.join(part.rootfs_dir, "etc/fstab"))
98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo") 98 pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo")
99 if not os.path.lexists(pseudo_dir): 99 if not os.path.lexists(pseudo_dir):
100 pseudo_dir = os.path.join(cls.__get_rootfs_dir(None), '../pseudo')
101
102 if not os.path.lexists(pseudo_dir):
100 logger.warn("%s folder does not exist. " 103 logger.warn("%s folder does not exist. "
101 "Usernames and permissions will be invalid " % pseudo_dir) 104 "Usernames and permissions will be invalid " % pseudo_dir)
102 pseudo_dir = None 105 pseudo_dir = None
diff --git a/scripts/oe-buildenv-internal b/scripts/oe-buildenv-internal
index e0d920f..485d4c5 100755
--- a/scripts/oe-buildenv-internal
+++ b/scripts/oe-buildenv-internal
@@ -106,13 +106,13 @@ unset BITBAKEDIR newpath
106export BUILDDIR 106export BUILDDIR
107export PATH 107export PATH
108 108
109BB_ENV_EXTRAWHITE_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \ 109BB_ENV_PASSTHROUGH_ADDITIONS_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \
110HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \ 110HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \
111all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \ 111all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \
112SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \ 112SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \
113SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \ 113SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \
114BB_LOGCONFIG" 114BB_LOGCONFIG"
115 115
116BB_ENV_EXTRAWHITE="$(echo $BB_ENV_EXTRAWHITE $BB_ENV_EXTRAWHITE_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')" 116BB_ENV_PASSTHROUGH_ADDITIONS="$(echo $BB_ENV_PASSTHROUGH_ADDITIONS $BB_ENV_PASSTHROUGH_ADDITIONS_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')"
117 117
118export BB_ENV_EXTRAWHITE 118export BB_ENV_PASSTHROUGH_ADDITIONS
diff --git a/scripts/oe-check-sstate b/scripts/oe-check-sstate
index 59bcb32..f4cc586 100755
--- a/scripts/oe-check-sstate
+++ b/scripts/oe-check-sstate
@@ -47,7 +47,7 @@ def check(args):
47 try: 47 try:
48 env = os.environ.copy() 48 env = os.environ.copy()
49 if not args.same_tmpdir: 49 if not args.same_tmpdir:
50 env['BB_ENV_EXTRAWHITE'] = env.get('BB_ENV_EXTRAWHITE', '') + ' TMPDIR:forcevariable' 50 env['BB_ENV_PASSTHROUGH_ADDITIONS'] = env.get('BB_ENV_PASSTHROUGH_ADDITIONS', '') + ' TMPDIR:forcevariable'
51 env['TMPDIR:forcevariable'] = tmpdir 51 env['TMPDIR:forcevariable'] = tmpdir
52 52
53 try: 53 try:
diff --git a/scripts/oe-pkgdata-util b/scripts/oe-pkgdata-util
index 71656da..7412cc1 100755
--- a/scripts/oe-pkgdata-util
+++ b/scripts/oe-pkgdata-util
@@ -296,7 +296,7 @@ def package_info(args):
296 extra = '' 296 extra = ''
297 for line in f: 297 for line in f:
298 for var in vars: 298 for var in vars:
299 m = re.match(var + '(?:_\S+)?:\s*(.+?)\s*$', line) 299 m = re.match(var + '(?::\S+)?:\s*(.+?)\s*$', line)
300 if m: 300 if m:
301 vals[var] = m.group(1) 301 vals[var] = m.group(1)
302 pkg_version = vals['PKGV'] or '' 302 pkg_version = vals['PKGV'] or ''
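The regex now follows the colon-based override syntax used in current pkgdata files (PKGSIZE:pkgname rather than PKGSIZE_pkgname). A quick check against a hypothetical pkgdata line:

    import re

    var = 'PKGSIZE'
    line = 'PKGSIZE:busybox: 530407'          # hypothetical pkgdata entry
    m = re.match(var + r'(?::\S+)?:\s*(.+?)\s*$', line)
    print(m.group(1))
    # -> 530407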
diff --git a/scripts/oe-setup-builddir b/scripts/oe-setup-builddir
index 5a51fa7..54048e6 100755
--- a/scripts/oe-setup-builddir
+++ b/scripts/oe-setup-builddir
@@ -42,7 +42,7 @@ if [ -f "$BUILDDIR/conf/templateconf.cfg" ]; then
42 TEMPLATECONF=$(cat "$BUILDDIR/conf/templateconf.cfg") 42 TEMPLATECONF=$(cat "$BUILDDIR/conf/templateconf.cfg")
43fi 43fi
44 44
45. $OEROOT/.templateconf 45. "$OEROOT"/.templateconf
46 46
47if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then 47if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then
48 echo "$TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg" 48 echo "$TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg"
@@ -79,7 +79,7 @@ example, select a different MACHINE (target hardware). See conf/local.conf
79for more information as common configuration options are commented. 79for more information as common configuration options are commented.
80 80
81EOM 81EOM
82 cp -f $OECORELOCALCONF "$BUILDDIR/conf/local.conf" 82 cp -f "$OECORELOCALCONF" "$BUILDDIR/conf/local.conf"
83 SHOWYPDOC=yes 83 SHOWYPDOC=yes
84fi 84fi
85 85
@@ -94,13 +94,13 @@ into your configuration please add entries to conf/bblayers.conf.
94 94
95EOM 95EOM
96 96
97 # Put the abosolute path to the layers in bblayers.conf so we can run 97 # Put the absolute path to the layers in bblayers.conf so we can run
98 # bitbake without the init script after the first run 98 # bitbake without the init script after the first run.
99 # ##COREBASE## is deprecated as it's meaning was inconsistent, but continue 99 # ##COREBASE## is deprecated as its meaning was inconsistent, but continue
100 # to replace it for compatibility. 100 # to replace it for compatibility.
101 sed -e "s|##OEROOT##|$OEROOT|g" \ 101 sed -e "s|##OEROOT##|$OEROOT|g" \
102 -e "s|##COREBASE##|$OEROOT|g" \ 102 -e "s|##COREBASE##|$OEROOT|g" \
103 $OECORELAYERCONF > "$BUILDDIR/conf/bblayers.conf" 103 "$OECORELAYERCONF" > "$BUILDDIR/conf/bblayers.conf"
104 SHOWYPDOC=yes 104 SHOWYPDOC=yes
105fi 105fi
106 106
@@ -115,7 +115,7 @@ The Yocto Project has extensive documentation about OE including a reference
115manual which can be found at: 115manual which can be found at:
116 https://docs.yoctoproject.org 116 https://docs.yoctoproject.org
117 117
118For more information about OpenEmbedded see their website: 118For more information about OpenEmbedded see the website:
119 https://www.openembedded.org/ 119 https://www.openembedded.org/
120 120
121EOM 121EOM
@@ -125,5 +125,5 @@ fi
125if [ -z "$OECORENOTESCONF" ]; then 125if [ -z "$OECORENOTESCONF" ]; then
126 OECORENOTESCONF="$OEROOT/meta/conf/conf-notes.txt" 126 OECORENOTESCONF="$OEROOT/meta/conf/conf-notes.txt"
127fi 127fi
128[ ! -r "$OECORENOTESCONF" ] || cat $OECORENOTESCONF 128[ ! -r "$OECORENOTESCONF" ] || cat "$OECORENOTESCONF"
129unset OECORENOTESCONF 129unset OECORENOTESCONF
diff --git a/scripts/postinst-intercepts/update_udev_hwdb b/scripts/postinst-intercepts/update_udev_hwdb
index 8076b8a..8b3f5de 100644
--- a/scripts/postinst-intercepts/update_udev_hwdb
+++ b/scripts/postinst-intercepts/update_udev_hwdb
@@ -9,14 +9,17 @@ case "${PREFERRED_PROVIDER_udev}" in
9 systemd) 9 systemd)
10 UDEV_EXTRA_ARGS="--usr" 10 UDEV_EXTRA_ARGS="--usr"
11 UDEVLIBDIR="${rootlibexecdir}" 11 UDEVLIBDIR="${rootlibexecdir}"
12 UDEVADM="${base_bindir}/udevadm"
12 ;; 13 ;;
13 14
14 *) 15 *)
15 UDEV_EXTRA_ARGS="" 16 UDEV_EXTRA_ARGS=""
16 UDEVLIBDIR="${sysconfdir}" 17 UDEVLIBDIR="${sysconfdir}"
18 UDEVADM="${bindir}/udevadm"
17 ;; 19 ;;
18esac 20esac
19 21
20rm -f $D${UDEVLIBDIR}/udev/hwdb.bin 22rm -f $D${UDEVLIBDIR}/udev/hwdb.bin
21PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${libexecdir}/${binprefix}udevadm hwdb --update --root $D ${UDEV_EXTRA_ARGS} 23PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${UDEVADM} hwdb --update --root $D ${UDEV_EXTRA_ARGS} ||
24 PSEUDO_UNLOAD=1 qemuwrapper -L $D $D${UDEVADM} hwdb --update --root $D ${UDEV_EXTRA_ARGS}
22chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin 25chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py
index 8c0fdb9..4ed8bfc 100755
--- a/scripts/relocate_sdk.py
+++ b/scripts/relocate_sdk.py
@@ -30,9 +30,16 @@ else:
30old_prefix = re.compile(b("##DEFAULT_INSTALL_DIR##")) 30old_prefix = re.compile(b("##DEFAULT_INSTALL_DIR##"))
31 31
32def get_arch(): 32def get_arch():
33 global endian_prefix
33 f.seek(0) 34 f.seek(0)
34 e_ident =f.read(16) 35 e_ident =f.read(16)
35 ei_mag0,ei_mag1_3,ei_class = struct.unpack("<B3sB11x", e_ident) 36 ei_mag0,ei_mag1_3,ei_class,ei_data,ei_version = struct.unpack("<B3sBBB9x", e_ident)
37
38 # ei_data = 1 for little-endian & 0 for big-endian
39 if ei_data == 1:
40 endian_prefix = '<'
41 else:
42 endian_prefix = '>'
36 43
37 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0: 44 if (ei_mag0 != 0x7f and ei_mag1_3 != "ELF") or ei_class == 0:
38 return 0 45 return 0
@@ -51,11 +58,11 @@ def parse_elf_header():
51 58
52 if arch == 32: 59 if arch == 32:
53 # 32bit 60 # 32bit
54 hdr_fmt = "<HHILLLIHHHHHH" 61 hdr_fmt = endian_prefix + "HHILLLIHHHHHH"
55 hdr_size = 52 62 hdr_size = 52
56 else: 63 else:
57 # 64bit 64 # 64bit
58 hdr_fmt = "<HHIQQQIHHHHHH" 65 hdr_fmt = endian_prefix + "HHIQQQIHHHHHH"
59 hdr_size = 64 66 hdr_size = 64
60 67
61 e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\ 68 e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, e_flags,\
@@ -64,9 +71,9 @@ def parse_elf_header():
64 71
65def change_interpreter(elf_file_name): 72def change_interpreter(elf_file_name):
66 if arch == 32: 73 if arch == 32:
67 ph_fmt = "<IIIIIIII" 74 ph_fmt = endian_prefix + "IIIIIIII"
68 else: 75 else:
69 ph_fmt = "<IIQQQQQQ" 76 ph_fmt = endian_prefix + "IIQQQQQQ"
70 77
71 """ look for PT_INTERP section """ 78 """ look for PT_INTERP section """
72 for i in range(0,e_phnum): 79 for i in range(0,e_phnum):
@@ -105,17 +112,17 @@ def change_interpreter(elf_file_name):
105 112
106def change_dl_sysdirs(elf_file_name): 113def change_dl_sysdirs(elf_file_name):
107 if arch == 32: 114 if arch == 32:
108 sh_fmt = "<IIIIIIIIII" 115 sh_fmt = endian_prefix + "IIIIIIIIII"
109 else: 116 else:
110 sh_fmt = "<IIQQQQIIQQ" 117 sh_fmt = endian_prefix + "IIQQQQIIQQ"
111 118
112 """ read section string table """ 119 """ read section string table """
113 f.seek(e_shoff + e_shstrndx * e_shentsize) 120 f.seek(e_shoff + e_shstrndx * e_shentsize)
114 sh_hdr = f.read(e_shentsize) 121 sh_hdr = f.read(e_shentsize)
115 if arch == 32: 122 if arch == 32:
116 sh_offset, sh_size = struct.unpack("<16xII16x", sh_hdr) 123 sh_offset, sh_size = struct.unpack(endian_prefix + "16xII16x", sh_hdr)
117 else: 124 else:
118 sh_offset, sh_size = struct.unpack("<24xQQ24x", sh_hdr) 125 sh_offset, sh_size = struct.unpack(endian_prefix + "24xQQ24x", sh_hdr)
119 126
120 f.seek(sh_offset) 127 f.seek(sh_offset)
121 sh_strtab = f.read(sh_size) 128 sh_strtab = f.read(sh_size)
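The endian_prefix changes let the same struct formats parse both little- and big-endian ELF binaries: EI_DATA selects '<' or '>' and every unpack format is built from that prefix. A minimal sketch with a hand-written e_ident for a hypothetical 64-bit little-endian binary:

    import struct

    def endian_prefix_for(e_ident):
        ei_data = e_ident[5]           # EI_DATA: 1 = little-endian, 2 = big-endian
        return '<' if ei_data == 1 else '>'

    e_ident = b'\x7fELF\x02\x01\x01\x00' + b'\x00' * 8   # 16-byte ELF identification
    prefix = endian_prefix_for(e_ident)
    hdr_fmt = prefix + "HHIQQQIHHHHHH"  # 64-bit header fields after e_ident
    print(prefix, struct.calcsize(hdr_fmt))
    # -> < 48   (the 48 bytes following the 16-byte e_ident)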
diff --git a/scripts/runqemu b/scripts/runqemu
index efb98ab..6e1f073 100755
--- a/scripts/runqemu
+++ b/scripts/runqemu
@@ -73,11 +73,11 @@ of the following environment variables (in any order):
73 gl-es - enable virgl-based GL acceleration, using OpenGL ES (also needs gtk or sdl options) 73 gl-es - enable virgl-based GL acceleration, using OpenGL ES (also needs gtk or sdl options)
74 egl-headless - enable headless EGL output; use vnc (via publicvnc option) or spice to see it 74 egl-headless - enable headless EGL output; use vnc (via publicvnc option) or spice to see it
75 (hint: if /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create 75 (hint: if /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create
76 one sutable for mesa llvmpipe sofware renderer) 76 one suitable for mesa llvmpipe software renderer)
77 serial - enable a serial console on /dev/ttyS0 77 serial - enable a serial console on /dev/ttyS0
78 serialstdio - enable a serial console on the console (regardless of graphics mode) 78 serialstdio - enable a serial console on the console (regardless of graphics mode)
79 slirp - enable user networking, no root privileges is required 79 slirp - enable user networking, no root privilege is required
80 snapshot - don't write changes to back to images 80 snapshot - don't write changes back to images
81 kvm - enable KVM when running x86/x86_64 (VT-capable CPU required) 81 kvm - enable KVM when running x86/x86_64 (VT-capable CPU required)
82 kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required) 82 kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required)
83 publicvnc - enable a VNC server open to all hosts 83 publicvnc - enable a VNC server open to all hosts
@@ -182,6 +182,7 @@ class BaseConfig(object):
182 self.gl = False 182 self.gl = False
183 self.gl_es = False 183 self.gl_es = False
184 self.egl_headless = False 184 self.egl_headless = False
185 self.publicvnc = False
185 self.novga = False 186 self.novga = False
186 self.cleantap = False 187 self.cleantap = False
187 self.saved_stty = '' 188 self.saved_stty = ''
@@ -199,7 +200,7 @@ class BaseConfig(object):
199 self.fsinfo = {} 200 self.fsinfo = {}
200 self.network_device = "-device e1000,netdev=net0,mac=@MAC@" 201 self.network_device = "-device e1000,netdev=net0,mac=@MAC@"
201 self.cmdline_ip_slirp = "ip=dhcp" 202 self.cmdline_ip_slirp = "ip=dhcp"
202 self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0" 203 self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8"
203 # Use different mac section for tap and slirp to avoid 204 # Use different mac section for tap and slirp to avoid
204 # conflicts, e.g., when one is running with tap, the other is 205 # conflicts, e.g., when one is running with tap, the other is
205 # running with slirp. 206 # running with slirp.
@@ -352,10 +353,10 @@ class BaseConfig(object):
352 def check_arg_path(self, p): 353 def check_arg_path(self, p):
353 """ 354 """
354 - Check whether it is <image>.qemuboot.conf or contains <image>.qemuboot.conf 355 - Check whether it is <image>.qemuboot.conf or contains <image>.qemuboot.conf
355 - Check whether is a kernel file 356 - Check whether it is a kernel file
356 - Check whether is a image file 357 - Check whether it is an image file
357 - Check whether it is a nfs dir 358 - Check whether it is an NFS dir
358 - Check whether it is a OVMF flash file 359 - Check whether it is an OVMF flash file
359 """ 360 """
360 if p.endswith('.qemuboot.conf'): 361 if p.endswith('.qemuboot.conf'):
361 self.qemuboot = p 362 self.qemuboot = p
@@ -366,7 +367,7 @@ class BaseConfig(object):
366 self.kernel = p 367 self.kernel = p
367 elif os.path.exists(p) and (not os.path.isdir(p)) and '-image-' in os.path.basename(p): 368 elif os.path.exists(p) and (not os.path.isdir(p)) and '-image-' in os.path.basename(p):
368 self.rootfs = p 369 self.rootfs = p
369 # Check filename against self.fstypes can hanlde <file>.cpio.gz, 370 # Check filename against self.fstypes can handle <file>.cpio.gz,
370 # otherwise, its type would be "gz", which is incorrect. 371 # otherwise, its type would be "gz", which is incorrect.
371 fst = "" 372 fst = ""
372 for t in self.fstypes: 373 for t in self.fstypes:
@@ -461,6 +462,14 @@ class BaseConfig(object):
461 raise RunQemuError("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.") 462 raise RunQemuError("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
462 os.environ['LIBGL_DRIVERS_PATH'] = dripath.decode('utf-8').strip() 463 os.environ['LIBGL_DRIVERS_PATH'] = dripath.decode('utf-8').strip()
463 464
465 # This preloads uninative libc pieces and therefore ensures that RPATH/RUNPATH
466 # in host mesa drivers doesn't trick uninative into loading host libc.
467 preload_items = ['libdl.so.2', 'librt.so.1', 'libpthread.so.0']
468 uninative_path = os.path.dirname(self.get("UNINATIVE_LOADER"))
469 if os.path.exists(uninative_path):
470 preload_paths = [os.path.join(uninative_path, i) for i in preload_items]
471 os.environ['LD_PRELOAD'] = " ".join(preload_paths)
472
464 def check_args(self): 473 def check_args(self):
465 for debug in ("-d", "--debug"): 474 for debug in ("-d", "--debug"):
466 if debug in sys.argv: 475 if debug in sys.argv:
@@ -474,6 +483,7 @@ class BaseConfig(object):
474 483
475 if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]: 484 if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]:
476 os.environ['SDL_RENDER_DRIVER'] = 'software' 485 os.environ['SDL_RENDER_DRIVER'] = 'software'
486 os.environ['SDL_FRAMEBUFFER_ACCELERATION'] = 'false'
477 487
478 unknown_arg = "" 488 unknown_arg = ""
479 for arg in sys.argv[1:]: 489 for arg in sys.argv[1:]:
@@ -512,6 +522,7 @@ class BaseConfig(object):
512 elif arg == 'snapshot': 522 elif arg == 'snapshot':
513 self.snapshot = True 523 self.snapshot = True
514 elif arg == 'publicvnc': 524 elif arg == 'publicvnc':
525 self.publicvnc = True
515 self.qemu_opt_script += ' -vnc :0' 526 self.qemu_opt_script += ' -vnc :0'
516 elif arg.startswith('tcpserial='): 527 elif arg.startswith('tcpserial='):
517 self.tcpserial_portnum = '%s' % arg[len('tcpserial='):] 528 self.tcpserial_portnum = '%s' % arg[len('tcpserial='):]
@@ -805,7 +816,7 @@ class BaseConfig(object):
805 self.set('QB_MEM', qb_mem) 816 self.set('QB_MEM', qb_mem)
806 817
807 mach = self.get('MACHINE') 818 mach = self.get('MACHINE')
808 if not mach.startswith('qemumips'): 819 if not mach.startswith(('qemumips', 'qemux86')):
809 self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M' 820 self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M'
810 821
811 self.qemu_opt_script += ' %s' % self.get('QB_MEM') 822 self.qemu_opt_script += ' %s' % self.get('QB_MEM')
@@ -1354,13 +1365,27 @@ class BaseConfig(object):
1354 if (self.gl_es == True or self.gl == True) and (self.sdl == False and self.gtk == False): 1365 if (self.gl_es == True or self.gl == True) and (self.sdl == False and self.gtk == False):
1355 raise RunQemuError('Option gl/gl-es needs gtk or sdl option.') 1366 raise RunQemuError('Option gl/gl-es needs gtk or sdl option.')
1356 1367
1357 if self.sdl == True or self.gtk == True or self.egl_headless == True: 1368 # If we have no display option, we autodetect based upon what qemu supports. We
1358 if self.gl or self.gl_es or self.egl_headless: 1369 # need our font setup and show-cursor below so we need to see what qemu --help says
1359 self.qemu_opt += ' -device virtio-vga-gl ' 1370 # is supported so we can pass our correct config in.
1371 if not self.nographic and not self.sdl and not self.gtk and not self.publicvnc and not self.egl_headless == True:
1372 output = subprocess.check_output([self.qemu_bin, "--help"], universal_newlines=True)
1373 if "-display gtk" in output:
1374 self.gtk = True
1375 elif "-display sdl" in output:
1376 self.sdl = True
1360 else: 1377 else:
1361 self.qemu_opt += ' -device virtio-vga ' 1378 self.qemu_opt += '-display none'
1379
1380 if self.sdl == True or self.gtk == True or self.egl_headless == True:
1362 1381
1363 self.qemu_opt += '-display ' 1382 if self.qemu_system.endswith(('i386', 'x86_64')):
1383 if self.gl or self.gl_es or self.egl_headless:
1384 self.qemu_opt += ' -device virtio-vga-gl '
1385 else:
1386 self.qemu_opt += ' -device virtio-vga '
1387
1388 self.qemu_opt += ' -display '
1364 if self.egl_headless == True: 1389 if self.egl_headless == True:
1365 self.set_dri_path() 1390 self.set_dri_path()
1366 self.qemu_opt += 'egl-headless,' 1391 self.qemu_opt += 'egl-headless,'
@@ -1368,6 +1393,7 @@ class BaseConfig(object):
1368 if self.sdl == True: 1393 if self.sdl == True:
1369 self.qemu_opt += 'sdl,' 1394 self.qemu_opt += 'sdl,'
1370 elif self.gtk == True: 1395 elif self.gtk == True:
1396 os.environ['FONTCONFIG_PATH'] = '/etc/fonts'
1371 self.qemu_opt += 'gtk,' 1397 self.qemu_opt += 'gtk,'
1372 1398
1373 if self.gl == True: 1399 if self.gl == True:
@@ -1405,7 +1431,7 @@ class BaseConfig(object):
1405 if serial_num < 2: 1431 if serial_num < 2:
1406 self.qemu_opt += " -serial null" 1432 self.qemu_opt += " -serial null"
1407 1433
1408 def setup_final(self): 1434 def find_qemu(self):
1409 qemu_bin = os.path.join(self.bindir_native, self.qemu_system) 1435 qemu_bin = os.path.join(self.bindir_native, self.qemu_system)
1410 1436
1411 # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't 1437 # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't
@@ -1424,8 +1450,13 @@ class BaseConfig(object):
1424 1450
1425 if not os.access(qemu_bin, os.X_OK): 1451 if not os.access(qemu_bin, os.X_OK):
1426 raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin) 1452 raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin)
1453 self.qemu_bin = qemu_bin
1454
1455 def setup_final(self):
1456
1457 self.find_qemu()
1427 1458
1428 self.qemu_opt = "%s %s %s %s %s" % (qemu_bin, self.get('NETWORK_CMD'), self.get('QB_RNG'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND').replace('@DEPLOY_DIR_IMAGE@', self.get('DEPLOY_DIR_IMAGE'))) 1459 self.qemu_opt = "%s %s %s %s %s" % (self.qemu_bin, self.get('NETWORK_CMD'), self.get('QB_RNG'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND').replace('@DEPLOY_DIR_IMAGE@', self.get('DEPLOY_DIR_IMAGE')))
1429 1460
1430 for ovmf in self.ovmf_bios: 1461 for ovmf in self.ovmf_bios:
1431 format = ovmf.rsplit('.', 1)[-1] 1462 format = ovmf.rsplit('.', 1)[-1]
@@ -1593,7 +1624,8 @@ def main():
1593 1624
1594 def sigterm_handler(signum, frame): 1625 def sigterm_handler(signum, frame):
1595 logger.info("SIGTERM received") 1626 logger.info("SIGTERM received")
1596 os.kill(config.qemupid, signal.SIGTERM) 1627 if config.qemupid:
1628 os.kill(config.qemupid, signal.SIGTERM)
1597 config.cleanup() 1629 config.cleanup()
1598 # Deliberately ignore the return code of 'tput smam'. 1630 # Deliberately ignore the return code of 'tput smam'.
1599 subprocess.call(["tput", "smam"]) 1631 subprocess.call(["tput", "smam"])
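The display autodetection probes the selected qemu binary's --help output to see which display backends it was built with, falling back to -display none. A hedged, standalone sketch of that probe (the qemu path below is hypothetical):

    import subprocess

    def autodetect_display(qemu_bin):
        output = subprocess.check_output([qemu_bin, "--help"], universal_newlines=True)
        if "-display gtk" in output:
            return "gtk"
        if "-display sdl" in output:
            return "sdl"
        return "none"

    # print(autodetect_display("/usr/bin/qemu-system-x86_64"))   # e.g. 'gtk'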
diff --git a/scripts/runqemu-addptable2image b/scripts/runqemu-addptable2image
index ca29427..87a8da3 100755
--- a/scripts/runqemu-addptable2image
+++ b/scripts/runqemu-addptable2image
@@ -1,6 +1,6 @@
1#!/bin/sh 1#!/bin/sh
2 2
3# Add a partion table to an ext2 image file 3# Add a partition table to an ext2 image file
4# 4#
5# Copyright (C) 2006-2007 OpenedHand Ltd. 5# Copyright (C) 2006-2007 OpenedHand Ltd.
6# 6#
diff --git a/scripts/runqemu-ifdown b/scripts/runqemu-ifdown
index a104c37..e0eb534 100755
--- a/scripts/runqemu-ifdown
+++ b/scripts/runqemu-ifdown
@@ -64,3 +64,4 @@ n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ]
64dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ] 64dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ]
65$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32 65$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32
66$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32 66$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32
67true
diff --git a/scripts/sstate-sysroot-cruft.sh b/scripts/sstate-sysroot-cruft.sh
index fbf1ca3..9c948e9 100755
--- a/scripts/sstate-sysroot-cruft.sh
+++ b/scripts/sstate-sysroot-cruft.sh
@@ -145,18 +145,6 @@ WHITELIST="${WHITELIST} \
145 .*/var/cache/fontconfig/ \ 145 .*/var/cache/fontconfig/ \
146" 146"
147 147
148# created by oe.utils.write_ld_so_conf which is used from few bbclasses and recipes:
149# meta/classes/image-prelink.bbclass: oe.utils.write_ld_so_conf(d)
150# meta/classes/insane.bbclass: oe.utils.write_ld_so_conf(d)
151# meta/classes/insane.bbclass: oe.utils.write_ld_so_conf(d)
152# meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb: oe.utils.write_ld_so_conf(d)
153# meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb: oe.utils.write_ld_so_conf(d)
154# introduced in oe-core commit 7fd1d7e639c2ed7e0699937a5cb245c187b7c811
155# and more visible since added to gobject-introspection in 10e0c1a3a452baa05d160a92a54b2e33cf0fd061
156WHITELIST="${WHITELIST} \
157 [^/]*/etc/ld.so.conf \
158"
159
160SYSROOTS="`readlink -f ${tmpdir}`/sysroots/" 148SYSROOTS="`readlink -f ${tmpdir}`/sysroots/"
161 149
162mkdir ${OUTPUT} 150mkdir ${OUTPUT}
diff --git a/scripts/verify-bashisms b/scripts/verify-bashisms
index 14d8c29..ec2374f 100755
--- a/scripts/verify-bashisms
+++ b/scripts/verify-bashisms
@@ -5,7 +5,7 @@
5 5
6import sys, os, subprocess, re, shutil 6import sys, os, subprocess, re, shutil
7 7
8whitelist = ( 8allowed = (
9 # type is supported by dash 9 # type is supported by dash
10 'if type systemctl >/dev/null 2>/dev/null; then', 10 'if type systemctl >/dev/null 2>/dev/null; then',
11 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then', 11 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then',
@@ -19,8 +19,8 @@ whitelist = (
19 '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE' 19 '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE'
20 ) 20 )
21 21
22def is_whitelisted(s): 22def is_allowed(s):
23 for w in whitelist: 23 for w in allowed:
24 if w in s: 24 if w in s:
25 return True 25 return True
26 return False 26 return False
@@ -49,7 +49,7 @@ def process(filename, function, lineno, script):
49 output = e.output.replace(fn.name, function) 49 output = e.output.replace(fn.name, function)
50 if not output or not output.startswith('possible bashism'): 50 if not output or not output.startswith('possible bashism'):
51 # Probably starts with or contains only warnings. Dump verbatim 51 # Probably starts with or contains only warnings. Dump verbatim
52 # with one space indention. Can't do the splitting and whitelist 52 # with one space indention. Can't do the splitting and allowed
53 # checking below. 53 # checking below.
54 return '\n'.join([filename, 54 return '\n'.join([filename,
55 ' Unexpected output from checkbashisms.pl'] + 55 ' Unexpected output from checkbashisms.pl'] +
@@ -65,7 +65,7 @@ def process(filename, function, lineno, script):
65 # ... 65 # ...
66 # ... 66 # ...
67 result = [] 67 result = []
68 # Check the results against the whitelist 68 # Check the results against the allowed list
69 for message, source in zip(output[0::2], output[1::2]): 69 for message, source in zip(output[0::2], output[1::2]):
70 if not is_whitelisted(source): 70 if not is_allowed(source):
71 if lineno is not None: 71 if lineno is not None:
diff --git a/scripts/wic b/scripts/wic
index 4bcff8f..aee63a4 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -159,6 +159,9 @@ def wic_create_subcommand(options, usage_str):
159 "(Use -e/--image-name to specify it)") 159 "(Use -e/--image-name to specify it)")
160 native_sysroot = options.native_sysroot 160 native_sysroot = options.native_sysroot
161 161
162 if options.kernel_dir:
163 kernel_dir = options.kernel_dir
164
162 if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)): 165 if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)):
163 logger.info("Building wic-tools...\n") 166 logger.info("Building wic-tools...\n")
164 subprocess.check_call(["bitbake", "wic-tools"]) 167 subprocess.check_call(["bitbake", "wic-tools"])
diff --git a/scripts/yocto-check-layer b/scripts/yocto-check-layer
index 2445ad5..0e5b75b 100755
--- a/scripts/yocto-check-layer
+++ b/scripts/yocto-check-layer
@@ -24,7 +24,7 @@ import scriptpath
24scriptpath.add_oe_lib_path() 24scriptpath.add_oe_lib_path()
25scriptpath.add_bitbake_lib_path() 25scriptpath.add_bitbake_lib_path()
26 26
27from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_layer_dependencies, get_signatures, check_bblayers 27from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_layer_dependencies, get_signatures, check_bblayers, sanity_check_layers
28from oeqa.utils.commands import get_bb_vars 28from oeqa.utils.commands import get_bb_vars
29 29
30PROGNAME = 'yocto-check-layer' 30PROGNAME = 'yocto-check-layer'
@@ -41,6 +41,12 @@ def test_layer(td, layer, test_software_layer_signatures):
41 tc.loadTests(CASES_PATHS) 41 tc.loadTests(CASES_PATHS)
42 return tc.runTests() 42 return tc.runTests()
43 43
44def dump_layer_debug(layer):
45 logger.debug("Found layer %s (%s)" % (layer["name"], layer["path"]))
46 collections = layer.get("collections", {})
47 if collections:
48 logger.debug("%s collections: %s" % (layer["name"], ", ".join(collections)))
49
44def main(): 50def main():
45 parser = argparse.ArgumentParser( 51 parser = argparse.ArgumentParser(
46 description="Yocto Project layer checking tool", 52 description="Yocto Project layer checking tool",
@@ -106,6 +112,17 @@ def main():
106 else: 112 else:
107 dep_layers = layers 113 dep_layers = layers
108 114
115 logger.debug("Found additional layers:")
116 for l in additional_layers:
117 dump_layer_debug(l)
118 logger.debug("Found dependency layers:")
119 for l in dep_layers:
120 dump_layer_debug(l)
121
122 if not sanity_check_layers(additional_layers + dep_layers, logger):
123 logger.error("Failed layer validation")
124 return 1
125
109 logger.info("Detected layers:") 126 logger.info("Detected layers:")
110 for layer in layers: 127 for layer in layers:
111 if layer['type'] == LayerType.ERROR_BSP_DISTRO: 128 if layer['type'] == LayerType.ERROR_BSP_DISTRO: