diff options
Diffstat (limited to 'scripts')
38 files changed, 388 insertions, 270 deletions
diff --git a/scripts/bitbake-whatchanged b/scripts/bitbake-whatchanged index 3095dafa46..6f4b268119 100755 --- a/scripts/bitbake-whatchanged +++ b/scripts/bitbake-whatchanged | |||
@@ -217,7 +217,7 @@ print what will be done between the current and last builds, for example: | |||
217 | # Edit the recipes | 217 | # Edit the recipes |
218 | $ bitbake-whatchanged core-image-sato | 218 | $ bitbake-whatchanged core-image-sato |
219 | 219 | ||
220 | The changes will be printed" | 220 | The changes will be printed. |
221 | 221 | ||
222 | Note: | 222 | Note: |
223 | The amount of tasks is not accurate when the task is "do_build" since | 223 | The amount of tasks is not accurate when the task is "do_build" since |
diff --git a/scripts/buildhistory-diff b/scripts/buildhistory-diff index 833f7c33a5..02eedafd6e 100755 --- a/scripts/buildhistory-diff +++ b/scripts/buildhistory-diff | |||
@@ -11,7 +11,6 @@ | |||
11 | import sys | 11 | import sys |
12 | import os | 12 | import os |
13 | import argparse | 13 | import argparse |
14 | from distutils.version import LooseVersion | ||
15 | 14 | ||
16 | # Ensure PythonGit is installed (buildhistory_analysis needs it) | 15 | # Ensure PythonGit is installed (buildhistory_analysis needs it) |
17 | try: | 16 | try: |
@@ -71,10 +70,6 @@ def main(): | |||
71 | parser = get_args_parser() | 70 | parser = get_args_parser() |
72 | args = parser.parse_args() | 71 | args = parser.parse_args() |
73 | 72 | ||
74 | if LooseVersion(git.__version__) < '0.3.1': | ||
75 | sys.stderr.write("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script\n") | ||
76 | sys.exit(1) | ||
77 | |||
78 | if len(args.revisions) > 2: | 73 | if len(args.revisions) > 2: |
79 | sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:])) | 74 | sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:])) |
80 | parser.print_help() | 75 | parser.print_help() |
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh index fa71d4a2e9..0a85e6e708 100755 --- a/scripts/contrib/build-perf-test-wrapper.sh +++ b/scripts/contrib/build-perf-test-wrapper.sh | |||
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then | |||
87 | exit 1 | 87 | exit 1 |
88 | fi | 88 | fi |
89 | 89 | ||
90 | if [ -n "$email_to" ]; then | ||
91 | if ! [ -x "$(command -v phantomjs)" ]; then | ||
92 | echo "ERROR: Sending email needs phantomjs." | ||
93 | exit 1 | ||
94 | fi | ||
95 | if ! [ -x "$(command -v optipng)" ]; then | ||
96 | echo "ERROR: Sending email needs optipng." | ||
97 | exit 1 | ||
98 | fi | ||
99 | fi | ||
100 | |||
101 | # Open a file descriptor for flock and acquire lock | 90 | # Open a file descriptor for flock and acquire lock |
102 | LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" | 91 | LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" |
103 | if ! exec 3> "$LOCK_FILE"; then | 92 | if ! exec 3> "$LOCK_FILE"; then |
104 | echo "ERROR: Unable to open lock file" | 93 | echo "ERROR: Unable to open lock file" |
105 | exit 1 | 94 | exit 1 |
106 | fi | 95 | fi |
107 | if ! flock -n 3; then | 96 | if ! flock -n 3; then |
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then | |||
226 | if [ -n "$email_to" ]; then | 215 | if [ -n "$email_to" ]; then |
227 | echo "Emailing test report" | 216 | echo "Emailing test report" |
228 | os_name=`get_os_release_var PRETTY_NAME` | 217 | os_name=`get_os_release_var PRETTY_NAME` |
229 | "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" | 218 | "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" |
230 | fi | 219 | fi |
231 | 220 | ||
232 | # Upload report files, unless we're on detached head | 221 | # Upload report files, unless we're on detached head |
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py new file mode 100755 index 0000000000..5b362ea2e8 --- /dev/null +++ b/scripts/contrib/convert-srcuri.py | |||
@@ -0,0 +1,77 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Conversion script to update SRC_URI to add branch to git urls | ||
4 | # | ||
5 | # Copyright (C) 2021 Richard Purdie | ||
6 | # | ||
7 | # SPDX-License-Identifier: GPL-2.0-only | ||
8 | # | ||
9 | |||
10 | import re | ||
11 | import os | ||
12 | import sys | ||
13 | import tempfile | ||
14 | import shutil | ||
15 | import mimetypes | ||
16 | |||
17 | if len(sys.argv) < 2: | ||
18 | print("Please specify a directory to run the conversion script against.") | ||
19 | sys.exit(1) | ||
20 | |||
21 | def processfile(fn): | ||
22 | def matchline(line): | ||
23 | if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line: | ||
24 | return False | ||
25 | return True | ||
26 | print("processing file '%s'" % fn) | ||
27 | try: | ||
28 | if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn: | ||
29 | return | ||
30 | fh, abs_path = tempfile.mkstemp() | ||
31 | modified = False | ||
32 | with os.fdopen(fh, 'w') as new_file: | ||
33 | with open(fn, "r") as old_file: | ||
34 | for line in old_file: | ||
35 | if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line): | ||
36 | if line.endswith('"\n'): | ||
37 | line = line.replace('"\n', ';branch=master"\n') | ||
38 | elif line.endswith(" \\\n"): | ||
39 | line = line.replace(' \\\n', ';branch=master \\\n') | ||
40 | modified = True | ||
41 | if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line): | ||
42 | if "protocol=git" in line: | ||
43 | line = line.replace('protocol=git', 'protocol=https') | ||
44 | elif line.endswith('"\n'): | ||
45 | line = line.replace('"\n', ';protocol=https"\n') | ||
46 | elif line.endswith(" \\\n"): | ||
47 | line = line.replace(' \\\n', ';protocol=https \\\n') | ||
48 | modified = True | ||
49 | new_file.write(line) | ||
50 | if modified: | ||
51 | shutil.copymode(fn, abs_path) | ||
52 | os.remove(fn) | ||
53 | shutil.move(abs_path, fn) | ||
54 | except UnicodeDecodeError: | ||
55 | pass | ||
56 | |||
57 | ourname = os.path.basename(sys.argv[0]) | ||
58 | ourversion = "0.1" | ||
59 | |||
60 | if os.path.isfile(sys.argv[1]): | ||
61 | processfile(sys.argv[1]) | ||
62 | sys.exit(0) | ||
63 | |||
64 | for targetdir in sys.argv[1:]: | ||
65 | print("processing directory '%s'" % targetdir) | ||
66 | for root, dirs, files in os.walk(targetdir): | ||
67 | for name in files: | ||
68 | if name == ourname: | ||
69 | continue | ||
70 | fn = os.path.join(root, name) | ||
71 | if os.path.islink(fn): | ||
72 | continue | ||
73 | if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"): | ||
74 | continue | ||
75 | processfile(fn) | ||
76 | |||
77 | print("All files processed with version %s" % ourversion) | ||
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh index 1191f57a8e..f436f9bae0 100755 --- a/scripts/contrib/documentation-audit.sh +++ b/scripts/contrib/documentation-audit.sh | |||
@@ -27,7 +27,7 @@ fi | |||
27 | 27 | ||
28 | echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" | 28 | echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" |
29 | echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " | 29 | echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " |
30 | echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\"" | 30 | echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\"" |
31 | 31 | ||
32 | for pkg in `bitbake -s | awk '{ print \$1 }'`; do | 32 | for pkg in `bitbake -s | awk '{ print \$1 }'`; do |
33 | if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || | 33 | if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || |
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py index de3862c897..7192113c28 100755 --- a/scripts/contrib/oe-build-perf-report-email.py +++ b/scripts/contrib/oe-build-perf-report-email.py | |||
@@ -19,8 +19,6 @@ import socket | |||
19 | import subprocess | 19 | import subprocess |
20 | import sys | 20 | import sys |
21 | import tempfile | 21 | import tempfile |
22 | from email.mime.image import MIMEImage | ||
23 | from email.mime.multipart import MIMEMultipart | ||
24 | from email.mime.text import MIMEText | 22 | from email.mime.text import MIMEText |
25 | 23 | ||
26 | 24 | ||
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") | |||
29 | log = logging.getLogger('oe-build-perf-report') | 27 | log = logging.getLogger('oe-build-perf-report') |
30 | 28 | ||
31 | 29 | ||
32 | # Find js scaper script | ||
33 | SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf', | ||
34 | 'scrape-html-report.js') | ||
35 | if not os.path.isfile(SCRAPE_JS): | ||
36 | log.error("Unableto find oe-build-perf-report-scrape.js") | ||
37 | sys.exit(1) | ||
38 | |||
39 | |||
40 | class ReportError(Exception): | ||
41 | """Local errors""" | ||
42 | pass | ||
43 | |||
44 | |||
45 | def check_utils(): | ||
46 | """Check that all needed utils are installed in the system""" | ||
47 | missing = [] | ||
48 | for cmd in ('phantomjs', 'optipng'): | ||
49 | if not shutil.which(cmd): | ||
50 | missing.append(cmd) | ||
51 | if missing: | ||
52 | log.error("The following tools are missing: %s", ' '.join(missing)) | ||
53 | sys.exit(1) | ||
54 | |||
55 | |||
56 | def parse_args(argv): | 30 | def parse_args(argv): |
57 | """Parse command line arguments""" | 31 | """Parse command line arguments""" |
58 | description = """Email build perf test report""" | 32 | description = """Email build perf test report""" |
@@ -77,137 +51,19 @@ def parse_args(argv): | |||
77 | "the email parts") | 51 | "the email parts") |
78 | parser.add_argument('--text', | 52 | parser.add_argument('--text', |
79 | help="Plain text message") | 53 | help="Plain text message") |
80 | parser.add_argument('--html', | ||
81 | help="HTML peport generated by oe-build-perf-report") | ||
82 | parser.add_argument('--phantomjs-args', action='append', | ||
83 | help="Extra command line arguments passed to PhantomJS") | ||
84 | 54 | ||
85 | args = parser.parse_args(argv) | 55 | args = parser.parse_args(argv) |
86 | 56 | ||
87 | if not args.html and not args.text: | 57 | if not args.text: |
88 | parser.error("Please specify --html and/or --text") | 58 | parser.error("Please specify --text") |
89 | 59 | ||
90 | return args | 60 | return args |
91 | 61 | ||
92 | 62 | ||
93 | def decode_png(infile, outfile): | 63 | def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]): |
94 | """Parse/decode/optimize png data from a html element""" | ||
95 | with open(infile) as f: | ||
96 | raw_data = f.read() | ||
97 | |||
98 | # Grab raw base64 data | ||
99 | b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1) | ||
100 | b64_data = re.sub('">.+$', '', b64_data, 1) | ||
101 | |||
102 | # Replace file with proper decoded png | ||
103 | with open(outfile, 'wb') as f: | ||
104 | f.write(base64.b64decode(b64_data)) | ||
105 | |||
106 | subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT) | ||
107 | |||
108 | |||
109 | def mangle_html_report(infile, outfile, pngs): | ||
110 | """Mangle html file into a email compatible format""" | ||
111 | paste = True | ||
112 | png_dir = os.path.dirname(outfile) | ||
113 | with open(infile) as f_in: | ||
114 | with open(outfile, 'w') as f_out: | ||
115 | for line in f_in.readlines(): | ||
116 | stripped = line.strip() | ||
117 | # Strip out scripts | ||
118 | if stripped == '<!--START-OF-SCRIPTS-->': | ||
119 | paste = False | ||
120 | elif stripped == '<!--END-OF-SCRIPTS-->': | ||
121 | paste = True | ||
122 | elif paste: | ||
123 | if re.match('^.+href="data:image/png;base64', stripped): | ||
124 | # Strip out encoded pngs (as they're huge in size) | ||
125 | continue | ||
126 | elif 'www.gstatic.com' in stripped: | ||
127 | # HACK: drop references to external static pages | ||
128 | continue | ||
129 | |||
130 | # Replace charts with <img> elements | ||
131 | match = re.match('<div id="(?P<id>\w+)"', stripped) | ||
132 | if match and match.group('id') in pngs: | ||
133 | f_out.write('<img src="cid:{}"\n'.format(match.group('id'))) | ||
134 | else: | ||
135 | f_out.write(line) | ||
136 | |||
137 | |||
138 | def scrape_html_report(report, outdir, phantomjs_extra_args=None): | ||
139 | """Scrape html report into a format sendable by email""" | ||
140 | tmpdir = tempfile.mkdtemp(dir='.') | ||
141 | log.debug("Using tmpdir %s for phantomjs output", tmpdir) | ||
142 | |||
143 | if not os.path.isdir(outdir): | ||
144 | os.mkdir(outdir) | ||
145 | if os.path.splitext(report)[1] not in ('.html', '.htm'): | ||
146 | raise ReportError("Invalid file extension for report, needs to be " | ||
147 | "'.html' or '.htm'") | ||
148 | |||
149 | try: | ||
150 | log.info("Scraping HTML report with PhangomJS") | ||
151 | extra_args = phantomjs_extra_args if phantomjs_extra_args else [] | ||
152 | subprocess.check_output(['phantomjs', '--debug=true'] + extra_args + | ||
153 | [SCRAPE_JS, report, tmpdir], | ||
154 | stderr=subprocess.STDOUT) | ||
155 | |||
156 | pngs = [] | ||
157 | images = [] | ||
158 | for fname in os.listdir(tmpdir): | ||
159 | base, ext = os.path.splitext(fname) | ||
160 | if ext == '.png': | ||
161 | log.debug("Decoding %s", fname) | ||
162 | decode_png(os.path.join(tmpdir, fname), | ||
163 | os.path.join(outdir, fname)) | ||
164 | pngs.append(base) | ||
165 | images.append(fname) | ||
166 | elif ext in ('.html', '.htm'): | ||
167 | report_file = fname | ||
168 | else: | ||
169 | log.warning("Unknown file extension: '%s'", ext) | ||
170 | #shutil.move(os.path.join(tmpdir, fname), outdir) | ||
171 | |||
172 | log.debug("Mangling html report file %s", report_file) | ||
173 | mangle_html_report(os.path.join(tmpdir, report_file), | ||
174 | os.path.join(outdir, report_file), pngs) | ||
175 | return (os.path.join(outdir, report_file), | ||
176 | [os.path.join(outdir, i) for i in images]) | ||
177 | finally: | ||
178 | shutil.rmtree(tmpdir) | ||
179 | |||
180 | def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[], | ||
181 | blind_copy=[]): | ||
182 | """Send email""" | ||
183 | # Generate email message | 64 | # Generate email message |
184 | text_msg = html_msg = None | 65 | with open(text_fn) as f: |
185 | if text_fn: | 66 | msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain') |
186 | with open(text_fn) as f: | ||
187 | text_msg = MIMEText("Yocto build performance test report.\n" + | ||
188 | f.read(), 'plain') | ||
189 | if html_fn: | ||
190 | html_msg = msg = MIMEMultipart('related') | ||
191 | with open(html_fn) as f: | ||
192 | html_msg.attach(MIMEText(f.read(), 'html')) | ||
193 | for img_fn in image_fns: | ||
194 | # Expect that content id is same as the filename | ||
195 | cid = os.path.splitext(os.path.basename(img_fn))[0] | ||
196 | with open(img_fn, 'rb') as f: | ||
197 | image_msg = MIMEImage(f.read()) | ||
198 | image_msg['Content-ID'] = '<{}>'.format(cid) | ||
199 | html_msg.attach(image_msg) | ||
200 | |||
201 | if text_msg and html_msg: | ||
202 | msg = MIMEMultipart('alternative') | ||
203 | msg.attach(text_msg) | ||
204 | msg.attach(html_msg) | ||
205 | elif text_msg: | ||
206 | msg = text_msg | ||
207 | elif html_msg: | ||
208 | msg = html_msg | ||
209 | else: | ||
210 | raise ReportError("Neither plain text nor html body specified") | ||
211 | 67 | ||
212 | pw_data = pwd.getpwuid(os.getuid()) | 68 | pw_data = pwd.getpwuid(os.getuid()) |
213 | full_name = pw_data.pw_gecos.split(',')[0] | 69 | full_name = pw_data.pw_gecos.split(',')[0] |
@@ -234,8 +90,6 @@ def main(argv=None): | |||
234 | if args.debug: | 90 | if args.debug: |
235 | log.setLevel(logging.DEBUG) | 91 | log.setLevel(logging.DEBUG) |
236 | 92 | ||
237 | check_utils() | ||
238 | |||
239 | if args.outdir: | 93 | if args.outdir: |
240 | outdir = args.outdir | 94 | outdir = args.outdir |
241 | if not os.path.exists(outdir): | 95 | if not os.path.exists(outdir): |
@@ -245,25 +99,16 @@ def main(argv=None): | |||
245 | 99 | ||
246 | try: | 100 | try: |
247 | log.debug("Storing email parts in %s", outdir) | 101 | log.debug("Storing email parts in %s", outdir) |
248 | html_report = images = None | ||
249 | if args.html: | ||
250 | html_report, images = scrape_html_report(args.html, outdir, | ||
251 | args.phantomjs_args) | ||
252 | |||
253 | if args.to: | 102 | if args.to: |
254 | log.info("Sending email to %s", ', '.join(args.to)) | 103 | log.info("Sending email to %s", ', '.join(args.to)) |
255 | if args.cc: | 104 | if args.cc: |
256 | log.info("Copying to %s", ', '.join(args.cc)) | 105 | log.info("Copying to %s", ', '.join(args.cc)) |
257 | if args.bcc: | 106 | if args.bcc: |
258 | log.info("Blind copying to %s", ', '.join(args.bcc)) | 107 | log.info("Blind copying to %s", ', '.join(args.bcc)) |
259 | send_email(args.text, html_report, images, args.subject, | 108 | send_email(args.text, args.subject, args.to, args.cc, args.bcc) |
260 | args.to, args.cc, args.bcc) | ||
261 | except subprocess.CalledProcessError as err: | 109 | except subprocess.CalledProcessError as err: |
262 | log.error("%s, with output:\n%s", str(err), err.output.decode()) | 110 | log.error("%s, with output:\n%s", str(err), err.output.decode()) |
263 | return 1 | 111 | return 1 |
264 | except ReportError as err: | ||
265 | log.error(err) | ||
266 | return 1 | ||
267 | finally: | 112 | finally: |
268 | if not args.outdir: | 113 | if not args.outdir: |
269 | log.debug("Wiping %s", outdir) | 114 | log.debug("Wiping %s", outdir) |
diff --git a/scripts/create-pull-request b/scripts/create-pull-request index 8eefcf63a5..2f91a355b0 100755 --- a/scripts/create-pull-request +++ b/scripts/create-pull-request | |||
@@ -128,7 +128,7 @@ PROTO_RE="[a-z][a-z+]*://" | |||
128 | GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)" | 128 | GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)" |
129 | REMOTE_URL=${REMOTE_URL%.git} | 129 | REMOTE_URL=${REMOTE_URL%.git} |
130 | REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#") | 130 | REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#") |
131 | REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#git://\4/\5#") | 131 | REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#https://\4/\5#") |
132 | 132 | ||
133 | if [ -z "$BRANCH" ]; then | 133 | if [ -z "$BRANCH" ]; then |
134 | BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2) | 134 | BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2) |
diff --git a/scripts/git b/scripts/git new file mode 100755 index 0000000000..644055e540 --- /dev/null +++ b/scripts/git | |||
@@ -0,0 +1,26 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Wrapper around 'git' that doesn't think we are root | ||
4 | |||
5 | import os | ||
6 | import shutil | ||
7 | import sys | ||
8 | |||
9 | os.environ['PSEUDO_UNLOAD'] = '1' | ||
10 | |||
11 | # calculate path to the real 'git' | ||
12 | path = os.environ['PATH'] | ||
13 | # we need to remove our path but also any other copy of this script which | ||
14 | # may be present, e.g. eSDK. | ||
15 | replacements = [os.path.dirname(sys.argv[0])] | ||
16 | for p in path.split(":"): | ||
17 | if p.endswith("/scripts"): | ||
18 | replacements.append(p) | ||
19 | for r in replacements: | ||
20 | path = path.replace(r, '/ignoreme') | ||
21 | real_git = shutil.which('git', path=path) | ||
22 | |||
23 | if len(sys.argv) == 1: | ||
24 | os.execl(real_git, 'git') | ||
25 | |||
26 | os.execv(real_git, sys.argv) | ||
diff --git a/scripts/lib/buildstats.py b/scripts/lib/buildstats.py index c69b5bf4d7..3b76286ba5 100644 --- a/scripts/lib/buildstats.py +++ b/scripts/lib/buildstats.py | |||
@@ -8,7 +8,7 @@ import json | |||
8 | import logging | 8 | import logging |
9 | import os | 9 | import os |
10 | import re | 10 | import re |
11 | from collections import namedtuple,OrderedDict | 11 | from collections import namedtuple |
12 | from statistics import mean | 12 | from statistics import mean |
13 | 13 | ||
14 | 14 | ||
@@ -238,7 +238,7 @@ class BuildStats(dict): | |||
238 | subdirs = os.listdir(path) | 238 | subdirs = os.listdir(path) |
239 | for dirname in subdirs: | 239 | for dirname in subdirs: |
240 | recipe_dir = os.path.join(path, dirname) | 240 | recipe_dir = os.path.join(path, dirname) |
241 | if not os.path.isdir(recipe_dir): | 241 | if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir): |
242 | continue | 242 | continue |
243 | name, epoch, version, revision = cls.split_nevr(dirname) | 243 | name, epoch, version, revision = cls.split_nevr(dirname) |
244 | bsrecipe = BSRecipe(name, epoch, version, revision) | 244 | bsrecipe = BSRecipe(name, epoch, version, revision) |
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py index fe545607bb..e69a10f452 100644 --- a/scripts/lib/checklayer/__init__.py +++ b/scripts/lib/checklayer/__init__.py | |||
@@ -146,7 +146,7 @@ def detect_layers(layer_directories, no_auto): | |||
146 | 146 | ||
147 | return layers | 147 | return layers |
148 | 148 | ||
149 | def _find_layer_depends(depend, layers): | 149 | def _find_layer(depend, layers): |
150 | for layer in layers: | 150 | for layer in layers: |
151 | if 'collections' not in layer: | 151 | if 'collections' not in layer: |
152 | continue | 152 | continue |
@@ -156,7 +156,7 @@ def _find_layer_depends(depend, layers): | |||
156 | return layer | 156 | return layer |
157 | return None | 157 | return None |
158 | 158 | ||
159 | def add_layer_dependencies(bblayersconf, layer, layers, logger): | 159 | def get_layer_dependencies(layer, layers, logger): |
160 | def recurse_dependencies(depends, layer, layers, logger, ret = []): | 160 | def recurse_dependencies(depends, layer, layers, logger, ret = []): |
161 | logger.debug('Processing dependencies %s for layer %s.' % \ | 161 | logger.debug('Processing dependencies %s for layer %s.' % \ |
162 | (depends, layer['name'])) | 162 | (depends, layer['name'])) |
@@ -166,7 +166,7 @@ def add_layer_dependencies(bblayersconf, layer, layers, logger): | |||
166 | if depend == 'core': | 166 | if depend == 'core': |
167 | continue | 167 | continue |
168 | 168 | ||
169 | layer_depend = _find_layer_depends(depend, layers) | 169 | layer_depend = _find_layer(depend, layers) |
170 | if not layer_depend: | 170 | if not layer_depend: |
171 | logger.error('Layer %s depends on %s and isn\'t found.' % \ | 171 | logger.error('Layer %s depends on %s and isn\'t found.' % \ |
172 | (layer['name'], depend)) | 172 | (layer['name'], depend)) |
@@ -203,6 +203,11 @@ def add_layer_dependencies(bblayersconf, layer, layers, logger): | |||
203 | layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends) | 203 | layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends) |
204 | 204 | ||
205 | # Note: [] (empty) is allowed, None is not! | 205 | # Note: [] (empty) is allowed, None is not! |
206 | return layer_depends | ||
207 | |||
208 | def add_layer_dependencies(bblayersconf, layer, layers, logger): | ||
209 | |||
210 | layer_depends = get_layer_dependencies(layer, layers, logger) | ||
206 | if layer_depends is None: | 211 | if layer_depends is None: |
207 | return False | 212 | return False |
208 | else: | 213 | else: |
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py index b82304e361..4495f71b24 100644 --- a/scripts/lib/checklayer/cases/common.py +++ b/scripts/lib/checklayer/cases/common.py | |||
@@ -14,7 +14,7 @@ class CommonCheckLayer(OECheckLayerTestCase): | |||
14 | # The top-level README file may have a suffix (like README.rst or README.txt). | 14 | # The top-level README file may have a suffix (like README.rst or README.txt). |
15 | readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*')) | 15 | readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*')) |
16 | self.assertTrue(len(readme_files) > 0, | 16 | self.assertTrue(len(readme_files) > 0, |
17 | msg="Layer doesn't contains README file.") | 17 | msg="Layer doesn't contain a README file.") |
18 | 18 | ||
19 | # There might be more than one file matching the file pattern above | 19 | # There might be more than one file matching the file pattern above |
20 | # (for example, README.rst and README-COPYING.rst). The one with the shortest | 20 | # (for example, README.rst and README-COPYING.rst). The one with the shortest |
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py index aaa25dda08..b4f9fbfe45 100644 --- a/scripts/lib/devtool/deploy.py +++ b/scripts/lib/devtool/deploy.py | |||
@@ -168,9 +168,9 @@ def deploy(args, config, basepath, workspace): | |||
168 | if args.strip and not args.dry_run: | 168 | if args.strip and not args.dry_run: |
169 | # Fakeroot copy to new destination | 169 | # Fakeroot copy to new destination |
170 | srcdir = recipe_outdir | 170 | srcdir = recipe_outdir |
171 | recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped') | 171 | recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped') |
172 | if os.path.isdir(recipe_outdir): | 172 | if os.path.isdir(recipe_outdir): |
173 | bb.utils.remove(recipe_outdir, True) | 173 | exec_fakeroot(rd, "rm -rf %s" % recipe_outdir, shell=True) |
174 | exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) | 174 | exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) |
175 | os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) | 175 | os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) |
176 | oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), | 176 | oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), |
@@ -201,9 +201,9 @@ def deploy(args, config, basepath, workspace): | |||
201 | print(' %s' % item) | 201 | print(' %s' % item) |
202 | return 0 | 202 | return 0 |
203 | 203 | ||
204 | extraoptions = '' | 204 | extraoptions = '-o HostKeyAlgorithms=+ssh-rsa' |
205 | if args.no_host_check: | 205 | if args.no_host_check: |
206 | extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | 206 | extraoptions += ' -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' |
207 | if not args.show_status: | 207 | if not args.show_status: |
208 | extraoptions += ' -q' | 208 | extraoptions += ' -q' |
209 | 209 | ||
@@ -274,9 +274,9 @@ def undeploy(args, config, basepath, workspace): | |||
274 | elif not args.recipename and not args.all: | 274 | elif not args.recipename and not args.all: |
275 | raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target') | 275 | raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target') |
276 | 276 | ||
277 | extraoptions = '' | 277 | extraoptions = '-o HostKeyAlgorithms=+ssh-rsa' |
278 | if args.no_host_check: | 278 | if args.no_host_check: |
279 | extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | 279 | extraoptions += ' -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' |
280 | if not args.show_status: | 280 | if not args.show_status: |
281 | extraoptions += ' -q' | 281 | extraoptions += ' -q' |
282 | 282 | ||
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 95384c5333..ff9227035d 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py | |||
@@ -43,7 +43,7 @@ def menuconfig(args, config, basepath, workspace): | |||
43 | return 1 | 43 | return 1 |
44 | 44 | ||
45 | check_workspace_recipe(workspace, args.component) | 45 | check_workspace_recipe(workspace, args.component) |
46 | pn = rd.getVar('PN', True) | 46 | pn = rd.getVar('PN') |
47 | 47 | ||
48 | if not rd.getVarFlag('do_menuconfig','task'): | 48 | if not rd.getVarFlag('do_menuconfig','task'): |
49 | raise DevtoolError("This recipe does not support menuconfig option") | 49 | raise DevtoolError("This recipe does not support menuconfig option") |
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 7b62b7e7b8..cfa88616af 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -357,7 +357,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
357 | bb.utils.mkdirhier(dst_d) | 357 | bb.utils.mkdirhier(dst_d) |
358 | shutil.move(src, dst) | 358 | shutil.move(src, dst) |
359 | 359 | ||
360 | def _copy_file(src, dst, dry_run_outdir=None): | 360 | def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): |
361 | """Copy a file. Creates all the directory components of destination path.""" | 361 | """Copy a file. Creates all the directory components of destination path.""" |
362 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 362 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
363 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) | 363 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) |
@@ -953,12 +953,17 @@ def modify(args, config, basepath, workspace): | |||
953 | 953 | ||
954 | if bb.data.inherits_class('kernel', rd): | 954 | if bb.data.inherits_class('kernel', rd): |
955 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | 955 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' |
956 | 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n') | 956 | 'do_fetch do_unpack do_kernel_configcheck"\n') |
957 | f.write('\ndo_patch[noexec] = "1"\n') | 957 | f.write('\ndo_patch[noexec] = "1"\n') |
958 | f.write('\ndo_configure_append() {\n' | 958 | f.write('\ndo_configure_append() {\n' |
959 | ' cp ${B}/.config ${S}/.config.baseline\n' | 959 | ' cp ${B}/.config ${S}/.config.baseline\n' |
960 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 960 | ' ln -sfT ${B}/.config ${S}/.config.new\n' |
961 | '}\n') | 961 | '}\n') |
962 | f.write('\ndo_kernel_configme_prepend() {\n' | ||
963 | ' if [ -e ${S}/.config ]; then\n' | ||
964 | ' mv ${S}/.config ${S}/.config.old\n' | ||
965 | ' fi\n' | ||
966 | '}\n') | ||
962 | if rd.getVarFlag('do_menuconfig','task'): | 967 | if rd.getVarFlag('do_menuconfig','task'): |
963 | f.write('\ndo_configure_append() {\n' | 968 | f.write('\ndo_configure_append() {\n' |
964 | ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' | 969 | ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' |
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 566c75369a..a2c6d052a6 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -435,7 +435,7 @@ def create_recipe(args): | |||
435 | if args.binary: | 435 | if args.binary: |
436 | # Assume the archive contains the directory structure verbatim | 436 | # Assume the archive contains the directory structure verbatim |
437 | # so we need to extract to a subdirectory | 437 | # so we need to extract to a subdirectory |
438 | fetchuri += ';subdir=${BP}' | 438 | fetchuri += ';subdir=${BPN}' |
439 | srcuri = fetchuri | 439 | srcuri = fetchuri |
440 | rev_re = re.compile(';rev=([^;]+)') | 440 | rev_re = re.compile(';rev=([^;]+)') |
441 | res = rev_re.search(srcuri) | 441 | res = rev_re.search(srcuri) |
@@ -478,6 +478,9 @@ def create_recipe(args): | |||
478 | storeTagName = params['tag'] | 478 | storeTagName = params['tag'] |
479 | params['nobranch'] = '1' | 479 | params['nobranch'] = '1' |
480 | del params['tag'] | 480 | del params['tag'] |
481 | # Assume 'master' branch if not set | ||
482 | if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params: | ||
483 | params['branch'] = 'master' | ||
481 | fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) | 484 | fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) |
482 | 485 | ||
483 | tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') | 486 | tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') |
@@ -527,10 +530,9 @@ def create_recipe(args): | |||
527 | # Remove HEAD reference point and drop remote prefix | 530 | # Remove HEAD reference point and drop remote prefix |
528 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] | 531 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] |
529 | if 'master' in get_branch: | 532 | if 'master' in get_branch: |
530 | # If it is master, we do not need to append 'branch=master' as this is default. | ||
531 | # Even with the case where get_branch has multiple objects, if 'master' is one | 533 | # Even with the case where get_branch has multiple objects, if 'master' is one |
532 | # of them, we should take from 'master' by default | 534 | # of them, we should take from 'master' by default |
533 | srcbranch = '' | 535 | srcbranch = 'master' |
534 | elif len(get_branch) == 1: | 536 | elif len(get_branch) == 1: |
535 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' | 537 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' |
536 | srcbranch = get_branch[0] | 538 | srcbranch = get_branch[0] |
@@ -543,8 +545,8 @@ def create_recipe(args): | |||
543 | # Since we might have a value in srcbranch, we need to | 545 | # Since we might have a value in srcbranch, we need to |
544 | # reconstruct the srcuri to include 'branch' in params. | 546 | # reconstruct the srcuri to include 'branch' in params. |
545 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) | 547 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) |
546 | if srcbranch: | 548 | if scheme in ['git', 'gitsm']: |
547 | params['branch'] = srcbranch | 549 | params['branch'] = srcbranch or 'master' |
548 | 550 | ||
549 | if storeTagName and scheme in ['git', 'gitsm']: | 551 | if storeTagName and scheme in ['git', 'gitsm']: |
550 | # Check srcrev using tag and check validity of the tag | 552 | # Check srcrev using tag and check validity of the tag |
@@ -603,7 +605,7 @@ def create_recipe(args): | |||
603 | splitline = line.split() | 605 | splitline = line.split() |
604 | if len(splitline) > 1: | 606 | if len(splitline) > 1: |
605 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | 607 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): |
606 | srcuri = reformat_git_uri(splitline[1]) | 608 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' |
607 | srcsubdir = 'git' | 609 | srcsubdir = 'git' |
608 | break | 610 | break |
609 | 611 | ||
@@ -743,6 +745,10 @@ def create_recipe(args): | |||
743 | for handler in handlers: | 745 | for handler in handlers: |
744 | handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) | 746 | handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) |
745 | 747 | ||
748 | # native and nativesdk classes are special and must be inherited last | ||
749 | # If present, put them at the end of the classes list | ||
750 | classes.sort(key=lambda c: c in ("native", "nativesdk")) | ||
751 | |||
746 | extrafiles = extravalues.pop('extrafiles', {}) | 752 | extrafiles = extravalues.pop('extrafiles', {}) |
747 | extra_pn = extravalues.pop('PN', None) | 753 | extra_pn = extravalues.pop('PN', None) |
748 | extra_pv = extravalues.pop('PV', None) | 754 | extra_pv = extravalues.pop('PV', None) |
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py index f0ca50ebe2..a349510ab8 100644 --- a/scripts/lib/resulttool/report.py +++ b/scripts/lib/resulttool/report.py | |||
@@ -176,7 +176,10 @@ class ResultsTextReport(object): | |||
176 | vals['sort'] = line['testseries'] + "_" + line['result_id'] | 176 | vals['sort'] = line['testseries'] + "_" + line['result_id'] |
177 | vals['failed_testcases'] = line['failed_testcases'] | 177 | vals['failed_testcases'] = line['failed_testcases'] |
178 | for k in cols: | 178 | for k in cols: |
179 | vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f')) | 179 | if total_tested: |
180 | vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f')) | ||
181 | else: | ||
182 | vals[k] = "0 (0%)" | ||
180 | for k in maxlen: | 183 | for k in maxlen: |
181 | if k in vals and len(vals[k]) > maxlen[k]: | 184 | if k in vals and len(vals[k]) > maxlen[k]: |
182 | maxlen[k] = len(vals[k]) | 185 | maxlen[k] = len(vals[k]) |
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py index 8917022d36..c5521d81bd 100644 --- a/scripts/lib/resulttool/resultutils.py +++ b/scripts/lib/resulttool/resultutils.py | |||
@@ -58,7 +58,11 @@ def append_resultsdata(results, f, configmap=store_map, configvars=extra_configv | |||
58 | testseries = posixpath.basename(posixpath.dirname(url.path)) | 58 | testseries = posixpath.basename(posixpath.dirname(url.path)) |
59 | else: | 59 | else: |
60 | with open(f, "r") as filedata: | 60 | with open(f, "r") as filedata: |
61 | data = json.load(filedata) | 61 | try: |
62 | data = json.load(filedata) | ||
63 | except json.decoder.JSONDecodeError: | ||
64 | print("Cannot decode {}. Possible corruption. Skipping.".format(f)) | ||
65 | data = "" | ||
62 | testseries = os.path.basename(os.path.dirname(f)) | 66 | testseries = os.path.basename(os.path.dirname(f)) |
63 | else: | 67 | else: |
64 | data = f | 68 | data = f |
@@ -142,7 +146,7 @@ def generic_get_log(sectionname, results, section): | |||
142 | return decode_log(ptest['log']) | 146 | return decode_log(ptest['log']) |
143 | 147 | ||
144 | def ptestresult_get_log(results, section): | 148 | def ptestresult_get_log(results, section): |
145 | return generic_get_log('ptestresuls.sections', results, section) | 149 | return generic_get_log('ptestresult.sections', results, section) |
146 | 150 | ||
147 | def generic_get_rawlogs(sectname, results): | 151 | def generic_get_rawlogs(sectname, results): |
148 | if sectname not in results: | 152 | if sectname not in results: |
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index f92255d8dc..47a08194d0 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
@@ -18,7 +18,8 @@ import sys | |||
18 | import tempfile | 18 | import tempfile |
19 | import threading | 19 | import threading |
20 | import importlib | 20 | import importlib |
21 | from importlib import machinery | 21 | import importlib.machinery |
22 | import importlib.util | ||
22 | 23 | ||
23 | class KeepAliveStreamHandler(logging.StreamHandler): | 24 | class KeepAliveStreamHandler(logging.StreamHandler): |
24 | def __init__(self, keepalive=True, **kwargs): | 25 | def __init__(self, keepalive=True, **kwargs): |
@@ -82,7 +83,9 @@ def load_plugins(logger, plugins, pluginpath): | |||
82 | logger.debug('Loading plugin %s' % name) | 83 | logger.debug('Loading plugin %s' % name) |
83 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) | 84 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) |
84 | if spec: | 85 | if spec: |
85 | return spec.loader.load_module() | 86 | mod = importlib.util.module_from_spec(spec) |
87 | spec.loader.exec_module(mod) | ||
88 | return mod | ||
86 | 89 | ||
87 | def plugin_name(filename): | 90 | def plugin_name(filename): |
88 | return os.path.splitext(os.path.basename(filename))[0] | 91 | return os.path.splitext(os.path.basename(filename))[0] |
@@ -215,7 +218,8 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
215 | pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE'] | 218 | pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE'] |
216 | for pathvar in pathvars: | 219 | for pathvar in pathvars: |
217 | path = rd.getVar(pathvar) | 220 | path = rd.getVar(pathvar) |
218 | shutil.rmtree(path) | 221 | if os.path.exists(path): |
222 | shutil.rmtree(path) | ||
219 | finally: | 223 | finally: |
220 | if fetchrecipe: | 224 | if fetchrecipe: |
221 | try: | 225 | try: |
diff --git a/scripts/lib/wic/engine.py b/scripts/lib/wic/engine.py index 9ff4394757..7dbde85696 100644 --- a/scripts/lib/wic/engine.py +++ b/scripts/lib/wic/engine.py | |||
@@ -19,10 +19,10 @@ import os | |||
19 | import tempfile | 19 | import tempfile |
20 | import json | 20 | import json |
21 | import subprocess | 21 | import subprocess |
22 | import shutil | ||
22 | import re | 23 | import re |
23 | 24 | ||
24 | from collections import namedtuple, OrderedDict | 25 | from collections import namedtuple, OrderedDict |
25 | from distutils.spawn import find_executable | ||
26 | 26 | ||
27 | from wic import WicError | 27 | from wic import WicError |
28 | from wic.filemap import sparse_copy | 28 | from wic.filemap import sparse_copy |
@@ -245,7 +245,7 @@ class Disk: | |||
245 | for path in pathlist.split(':'): | 245 | for path in pathlist.split(':'): |
246 | self.paths = "%s%s:%s" % (native_sysroot, path, self.paths) | 246 | self.paths = "%s%s:%s" % (native_sysroot, path, self.paths) |
247 | 247 | ||
248 | self.parted = find_executable("parted", self.paths) | 248 | self.parted = shutil.which("parted", path=self.paths) |
249 | if not self.parted: | 249 | if not self.parted: |
250 | raise WicError("Can't find executable parted") | 250 | raise WicError("Can't find executable parted") |
251 | 251 | ||
@@ -283,7 +283,7 @@ class Disk: | |||
283 | "resize2fs", "mkswap", "mkdosfs", "debugfs"): | 283 | "resize2fs", "mkswap", "mkdosfs", "debugfs"): |
284 | aname = "_%s" % name | 284 | aname = "_%s" % name |
285 | if aname not in self.__dict__: | 285 | if aname not in self.__dict__: |
286 | setattr(self, aname, find_executable(name, self.paths)) | 286 | setattr(self, aname, shutil.which(name, path=self.paths)) |
287 | if aname not in self.__dict__ or self.__dict__[aname] is None: | 287 | if aname not in self.__dict__ or self.__dict__[aname] is None: |
288 | raise WicError("Can't find executable '{}'".format(name)) | 288 | raise WicError("Can't find executable '{}'".format(name)) |
289 | return self.__dict__[aname] | 289 | return self.__dict__[aname] |
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py index 62a2a90e79..fcace95ff4 100644 --- a/scripts/lib/wic/help.py +++ b/scripts/lib/wic/help.py | |||
@@ -840,8 +840,8 @@ DESCRIPTION | |||
840 | meanings. The commands are based on the Fedora kickstart | 840 | meanings. The commands are based on the Fedora kickstart |
841 | documentation but with modifications to reflect wic capabilities. | 841 | documentation but with modifications to reflect wic capabilities. |
842 | 842 | ||
843 | http://fedoraproject.org/wiki/Anaconda/Kickstart#part_or_partition | 843 | https://pykickstart.readthedocs.io/en/latest/kickstart-docs.html#part-or-partition |
844 | http://fedoraproject.org/wiki/Anaconda/Kickstart#bootloader | 844 | https://pykickstart.readthedocs.io/en/latest/kickstart-docs.html#bootloader |
845 | 845 | ||
846 | Commands | 846 | Commands |
847 | 847 | ||
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py index e4b5a0d519..3e11822996 100644 --- a/scripts/lib/wic/misc.py +++ b/scripts/lib/wic/misc.py | |||
@@ -16,9 +16,9 @@ import logging | |||
16 | import os | 16 | import os |
17 | import re | 17 | import re |
18 | import subprocess | 18 | import subprocess |
19 | import shutil | ||
19 | 20 | ||
20 | from collections import defaultdict | 21 | from collections import defaultdict |
21 | from distutils import spawn | ||
22 | 22 | ||
23 | from wic import WicError | 23 | from wic import WicError |
24 | 24 | ||
@@ -26,6 +26,7 @@ logger = logging.getLogger('wic') | |||
26 | 26 | ||
27 | # executable -> recipe pairs for exec_native_cmd | 27 | # executable -> recipe pairs for exec_native_cmd |
28 | NATIVE_RECIPES = {"bmaptool": "bmap-tools", | 28 | NATIVE_RECIPES = {"bmaptool": "bmap-tools", |
29 | "dumpe2fs": "e2fsprogs", | ||
29 | "grub-mkimage": "grub-efi", | 30 | "grub-mkimage": "grub-efi", |
30 | "isohybrid": "syslinux", | 31 | "isohybrid": "syslinux", |
31 | "mcopy": "mtools", | 32 | "mcopy": "mtools", |
@@ -45,7 +46,8 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools", | |||
45 | "parted": "parted", | 46 | "parted": "parted", |
46 | "sfdisk": "util-linux", | 47 | "sfdisk": "util-linux", |
47 | "sgdisk": "gptfdisk", | 48 | "sgdisk": "gptfdisk", |
48 | "syslinux": "syslinux" | 49 | "syslinux": "syslinux", |
50 | "tar": "tar" | ||
49 | } | 51 | } |
50 | 52 | ||
51 | def runtool(cmdln_or_args): | 53 | def runtool(cmdln_or_args): |
@@ -112,6 +114,15 @@ def exec_cmd(cmd_and_args, as_shell=False): | |||
112 | """ | 114 | """ |
113 | return _exec_cmd(cmd_and_args, as_shell)[1] | 115 | return _exec_cmd(cmd_and_args, as_shell)[1] |
114 | 116 | ||
117 | def find_executable(cmd, paths): | ||
118 | recipe = cmd | ||
119 | if recipe in NATIVE_RECIPES: | ||
120 | recipe = NATIVE_RECIPES[recipe] | ||
121 | provided = get_bitbake_var("ASSUME_PROVIDED") | ||
122 | if provided and "%s-native" % recipe in provided: | ||
123 | return True | ||
124 | |||
125 | return shutil.which(cmd, path=paths) | ||
115 | 126 | ||
116 | def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""): | 127 | def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""): |
117 | """ | 128 | """ |
@@ -140,7 +151,7 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""): | |||
140 | logger.debug("exec_native_cmd: %s", native_cmd_and_args) | 151 | logger.debug("exec_native_cmd: %s", native_cmd_and_args) |
141 | 152 | ||
142 | # If the command isn't in the native sysroot say we failed. | 153 | # If the command isn't in the native sysroot say we failed. |
143 | if spawn.find_executable(args[0], native_paths): | 154 | if find_executable(args[0], native_paths): |
144 | ret, out = _exec_cmd(native_cmd_and_args, True) | 155 | ret, out = _exec_cmd(native_cmd_and_args, True) |
145 | else: | 156 | else: |
146 | ret = 127 | 157 | ret = 127 |
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py index e574f40c47..792bb3dcd3 100644 --- a/scripts/lib/wic/partition.py +++ b/scripts/lib/wic/partition.py | |||
@@ -104,7 +104,7 @@ class Partition(): | |||
104 | extra_blocks = self.extra_space | 104 | extra_blocks = self.extra_space |
105 | 105 | ||
106 | rootfs_size = actual_rootfs_size + extra_blocks | 106 | rootfs_size = actual_rootfs_size + extra_blocks |
107 | rootfs_size *= self.overhead_factor | 107 | rootfs_size = int(rootfs_size * self.overhead_factor) |
108 | 108 | ||
109 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", | 109 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", |
110 | extra_blocks, self.mountpoint, rootfs_size) | 110 | extra_blocks, self.mountpoint, rootfs_size) |
@@ -298,6 +298,8 @@ class Partition(): | |||
298 | mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs) | 298 | mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs) |
299 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) | 299 | exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) |
300 | 300 | ||
301 | self.check_for_Y2038_problem(rootfs, native_sysroot) | ||
302 | |||
301 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, | 303 | def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, |
302 | native_sysroot, pseudo): | 304 | native_sysroot, pseudo): |
303 | """ | 305 | """ |
@@ -388,6 +390,8 @@ class Partition(): | |||
388 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) | 390 | (self.fstype, extraopts, label_str, self.fsuuid, rootfs) |
389 | exec_native_cmd(mkfs_cmd, native_sysroot) | 391 | exec_native_cmd(mkfs_cmd, native_sysroot) |
390 | 392 | ||
393 | self.check_for_Y2038_problem(rootfs, native_sysroot) | ||
394 | |||
391 | def prepare_empty_partition_btrfs(self, rootfs, oe_builddir, | 395 | def prepare_empty_partition_btrfs(self, rootfs, oe_builddir, |
392 | native_sysroot): | 396 | native_sysroot): |
393 | """ | 397 | """ |
@@ -449,3 +453,37 @@ class Partition(): | |||
449 | 453 | ||
450 | mkswap_cmd = "mkswap %s -U %s %s" % (label_str, self.fsuuid, path) | 454 | mkswap_cmd = "mkswap %s -U %s %s" % (label_str, self.fsuuid, path) |
451 | exec_native_cmd(mkswap_cmd, native_sysroot) | 455 | exec_native_cmd(mkswap_cmd, native_sysroot) |
456 | |||
457 | def check_for_Y2038_problem(self, rootfs, native_sysroot): | ||
458 | """ | ||
459 | Check if the filesystem is affected by the Y2038 problem | ||
460 | (Y2038 problem = 32 bit time_t overflow in January 2038) | ||
461 | """ | ||
462 | def get_err_str(part): | ||
463 | err = "The {} filesystem {} has no Y2038 support." | ||
464 | if part.mountpoint: | ||
465 | args = [part.fstype, "mounted at %s" % part.mountpoint] | ||
466 | elif part.label: | ||
467 | args = [part.fstype, "labeled '%s'" % part.label] | ||
468 | elif part.part_name: | ||
469 | args = [part.fstype, "in partition '%s'" % part.part_name] | ||
470 | else: | ||
471 | args = [part.fstype, "in partition %s" % part.num] | ||
472 | return err.format(*args) | ||
473 | |||
474 | # ext2 and ext3 are always affected by the Y2038 problem | ||
475 | if self.fstype in ["ext2", "ext3"]: | ||
476 | logger.warn(get_err_str(self)) | ||
477 | return | ||
478 | |||
479 | ret, out = exec_native_cmd("dumpe2fs %s" % rootfs, native_sysroot) | ||
480 | |||
481 | # if ext4 is affected by the Y2038 problem depends on the inode size | ||
482 | for line in out.splitlines(): | ||
483 | if line.startswith("Inode size:"): | ||
484 | size = int(line.split(":")[1].strip()) | ||
485 | if size < 256: | ||
486 | logger.warn("%s Inodes (of size %d) are too small." % | ||
487 | (get_err_str(self), size)) | ||
488 | break | ||
489 | |||
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py index d9b4e57747..b64568339b 100644 --- a/scripts/lib/wic/pluginbase.py +++ b/scripts/lib/wic/pluginbase.py | |||
@@ -9,9 +9,11 @@ __all__ = ['ImagerPlugin', 'SourcePlugin'] | |||
9 | 9 | ||
10 | import os | 10 | import os |
11 | import logging | 11 | import logging |
12 | import types | ||
12 | 13 | ||
13 | from collections import defaultdict | 14 | from collections import defaultdict |
14 | from importlib.machinery import SourceFileLoader | 15 | import importlib |
16 | import importlib.util | ||
15 | 17 | ||
16 | from wic import WicError | 18 | from wic import WicError |
17 | from wic.misc import get_bitbake_var | 19 | from wic.misc import get_bitbake_var |
@@ -54,7 +56,9 @@ class PluginMgr: | |||
54 | mname = fname[:-3] | 56 | mname = fname[:-3] |
55 | mpath = os.path.join(ppath, fname) | 57 | mpath = os.path.join(ppath, fname) |
56 | logger.debug("loading plugin module %s", mpath) | 58 | logger.debug("loading plugin module %s", mpath) |
57 | SourceFileLoader(mname, mpath).load_module() | 59 | spec = importlib.util.spec_from_file_location(mname, mpath) |
60 | module = importlib.util.module_from_spec(spec) | ||
61 | spec.loader.exec_module(module) | ||
58 | 62 | ||
59 | return PLUGINS.get(ptype) | 63 | return PLUGINS.get(ptype) |
60 | 64 | ||
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py index 7e1c1c03ab..42704d1e10 100644 --- a/scripts/lib/wic/plugins/imager/direct.py +++ b/scripts/lib/wic/plugins/imager/direct.py | |||
@@ -115,7 +115,7 @@ class DirectPlugin(ImagerPlugin): | |||
115 | updated = False | 115 | updated = False |
116 | for part in self.parts: | 116 | for part in self.parts: |
117 | if not part.realnum or not part.mountpoint \ | 117 | if not part.realnum or not part.mountpoint \ |
118 | or part.mountpoint == "/": | 118 | or part.mountpoint == "/" or not (part.mountpoint.startswith('/') or part.mountpoint == "swap"): |
119 | continue | 119 | continue |
120 | 120 | ||
121 | if part.use_uuid: | 121 | if part.use_uuid: |
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py index 2cfdc10ecd..05e8471116 100644 --- a/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/scripts/lib/wic/plugins/source/bootimg-efi.py | |||
@@ -277,6 +277,13 @@ class BootimgEFIPlugin(SourcePlugin): | |||
277 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", | 277 | logger.debug("Added %d extra blocks to %s to get to %d total blocks", |
278 | extra_blocks, part.mountpoint, blocks) | 278 | extra_blocks, part.mountpoint, blocks) |
279 | 279 | ||
280 | # required for compatibility with certain devices expecting file system | ||
281 | # block count to be equal to partition block count | ||
282 | if blocks < part.fixed_size: | ||
283 | blocks = part.fixed_size | ||
284 | logger.debug("Overriding %s to %d total blocks for compatibility", | ||
285 | part.mountpoint, blocks) | ||
286 | |||
280 | # dosfs image, created by mkdosfs | 287 | # dosfs image, created by mkdosfs |
281 | bootimg = "%s/boot.img" % cr_workdir | 288 | bootimg = "%s/boot.img" % cr_workdir |
282 | 289 | ||
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg-pcbios.py index f2639e7004..32e47f1831 100644 --- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py +++ b/scripts/lib/wic/plugins/source/bootimg-pcbios.py | |||
@@ -186,8 +186,10 @@ class BootimgPcbiosPlugin(SourcePlugin): | |||
186 | # dosfs image, created by mkdosfs | 186 | # dosfs image, created by mkdosfs |
187 | bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno) | 187 | bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno) |
188 | 188 | ||
189 | dosfs_cmd = "mkdosfs -n boot -i %s -S 512 -C %s %d" % \ | 189 | label = part.label if part.label else "boot" |
190 | (part.fsuuid, bootimg, blocks) | 190 | |
191 | dosfs_cmd = "mkdosfs -n %s -i %s -S 512 -C %s %d" % \ | ||
192 | (label, part.fsuuid, bootimg, blocks) | ||
191 | exec_native_cmd(dosfs_cmd, native_sysroot) | 193 | exec_native_cmd(dosfs_cmd, native_sysroot) |
192 | 194 | ||
193 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) | 195 | mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (bootimg, hdddir) |
diff --git a/scripts/nativesdk-intercept/chgrp b/scripts/nativesdk-intercept/chgrp new file mode 100755 index 0000000000..30cc417d3a --- /dev/null +++ b/scripts/nativesdk-intercept/chgrp | |||
@@ -0,0 +1,27 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Wrapper around 'chgrp' that redirects to root in all cases | ||
4 | |||
5 | import os | ||
6 | import shutil | ||
7 | import sys | ||
8 | |||
9 | # calculate path to the real 'chgrp' | ||
10 | path = os.environ['PATH'] | ||
11 | path = path.replace(os.path.dirname(sys.argv[0]), '') | ||
12 | real_chgrp = shutil.which('chgrp', path=path) | ||
13 | |||
14 | args = list() | ||
15 | |||
16 | found = False | ||
17 | for i in sys.argv: | ||
18 | if i.startswith("-"): | ||
19 | args.append(i) | ||
20 | continue | ||
21 | if not found: | ||
22 | args.append("root") | ||
23 | found = True | ||
24 | else: | ||
25 | args.append(i) | ||
26 | |||
27 | os.execv(real_chgrp, args) | ||
diff --git a/scripts/nativesdk-intercept/chown b/scripts/nativesdk-intercept/chown new file mode 100755 index 0000000000..3914b3e384 --- /dev/null +++ b/scripts/nativesdk-intercept/chown | |||
@@ -0,0 +1,27 @@ | |||
1 | #!/usr/bin/env python3 | ||
2 | # | ||
3 | # Wrapper around 'chown' that redirects to root in all cases | ||
4 | |||
5 | import os | ||
6 | import shutil | ||
7 | import sys | ||
8 | |||
9 | # calculate path to the real 'chown' | ||
10 | path = os.environ['PATH'] | ||
11 | path = path.replace(os.path.dirname(sys.argv[0]), '') | ||
12 | real_chown = shutil.which('chown', path=path) | ||
13 | |||
14 | args = list() | ||
15 | |||
16 | found = False | ||
17 | for i in sys.argv: | ||
18 | if i.startswith("-"): | ||
19 | args.append(i) | ||
20 | continue | ||
21 | if not found: | ||
22 | args.append("root:root") | ||
23 | found = True | ||
24 | else: | ||
25 | args.append(i) | ||
26 | |||
27 | os.execv(real_chown, args) | ||
diff --git a/scripts/oe-depends-dot b/scripts/oe-depends-dot index 5eb3e12769..1c2d51c6ec 100755 --- a/scripts/oe-depends-dot +++ b/scripts/oe-depends-dot | |||
@@ -15,7 +15,7 @@ class Dot(object): | |||
15 | def __init__(self): | 15 | def __init__(self): |
16 | parser = argparse.ArgumentParser( | 16 | parser = argparse.ArgumentParser( |
17 | description="Analyse recipe-depends.dot generated by bitbake -g", | 17 | description="Analyse recipe-depends.dot generated by bitbake -g", |
18 | epilog="Use %(prog)s --help to get help") | 18 | formatter_class=argparse.RawDescriptionHelpFormatter) |
19 | parser.add_argument("dotfile", | 19 | parser.add_argument("dotfile", |
20 | help = "Specify the dotfile", nargs = 1, action='store', default='') | 20 | help = "Specify the dotfile", nargs = 1, action='store', default='') |
21 | parser.add_argument("-k", "--key", | 21 | parser.add_argument("-k", "--key", |
@@ -32,6 +32,21 @@ class Dot(object): | |||
32 | " For example, A->B, B->C, A->C, then A->C can be removed.", | 32 | " For example, A->B, B->C, A->C, then A->C can be removed.", |
33 | action="store_true", default=False) | 33 | action="store_true", default=False) |
34 | 34 | ||
35 | parser.epilog = """ | ||
36 | Examples: | ||
37 | First generate the .dot file: | ||
38 | bitbake -g core-image-minimal | ||
39 | |||
40 | To find out why a package is being built: | ||
41 | %(prog)s -k <package> -w ./task-depends.dot | ||
42 | |||
43 | To find out what a package depends on: | ||
44 | %(prog)s -k <package> -d ./task-depends.dot | ||
45 | |||
46 | Reduce the .dot file packages only, no tasks: | ||
47 | %(prog)s -r ./task-depends.dot | ||
48 | """ | ||
49 | |||
35 | self.args = parser.parse_args() | 50 | self.args = parser.parse_args() |
36 | 51 | ||
37 | if len(sys.argv) != 3 and len(sys.argv) < 5: | 52 | if len(sys.argv) != 3 and len(sys.argv) < 5: |
@@ -99,6 +114,10 @@ class Dot(object): | |||
99 | if key == "meta-world-pkgdata": | 114 | if key == "meta-world-pkgdata": |
100 | continue | 115 | continue |
101 | dep = m.group(2) | 116 | dep = m.group(2) |
117 | key = key.split('.')[0] | ||
118 | dep = dep.split('.')[0] | ||
119 | if key == dep: | ||
120 | continue | ||
102 | if key in depends: | 121 | if key in depends: |
103 | if not key in depends[key]: | 122 | if not key in depends[key]: |
104 | depends[key].add(dep) | 123 | depends[key].add(dep) |
diff --git a/scripts/oe-pkgdata-browser b/scripts/oe-pkgdata-browser index 8d223185a4..65a6ee956e 100755 --- a/scripts/oe-pkgdata-browser +++ b/scripts/oe-pkgdata-browser | |||
@@ -236,6 +236,8 @@ class PkgUi(): | |||
236 | update_deps("RPROVIDES", "Provides: ", self.provides_label, clickable=False) | 236 | update_deps("RPROVIDES", "Provides: ", self.provides_label, clickable=False) |
237 | 237 | ||
238 | def load_recipes(self): | 238 | def load_recipes(self): |
239 | if not os.path.exists(pkgdata): | ||
240 | sys.exit("Error: Please ensure %s exists by generating packages before using this tool." % pkgdata) | ||
239 | for recipe in sorted(os.listdir(pkgdata)): | 241 | for recipe in sorted(os.listdir(pkgdata)): |
240 | if os.path.isfile(os.path.join(pkgdata, recipe)): | 242 | if os.path.isfile(os.path.join(pkgdata, recipe)): |
241 | self.recipe_iters[recipe] = self.recipe_store.append([recipe]) | 243 | self.recipe_iters[recipe] = self.recipe_store.append([recipe]) |
diff --git a/scripts/oe-setup-builddir b/scripts/oe-setup-builddir index 30eaa8efbe..5a51fa793f 100755 --- a/scripts/oe-setup-builddir +++ b/scripts/oe-setup-builddir | |||
@@ -113,10 +113,10 @@ if [ ! -z "$SHOWYPDOC" ]; then | |||
113 | cat <<EOM | 113 | cat <<EOM |
114 | The Yocto Project has extensive documentation about OE including a reference | 114 | The Yocto Project has extensive documentation about OE including a reference |
115 | manual which can be found at: | 115 | manual which can be found at: |
116 | http://yoctoproject.org/documentation | 116 | https://docs.yoctoproject.org |
117 | 117 | ||
118 | For more information about OpenEmbedded see their website: | 118 | For more information about OpenEmbedded see their website: |
119 | http://www.openembedded.org/ | 119 | https://www.openembedded.org/ |
120 | 120 | ||
121 | EOM | 121 | EOM |
122 | # unset SHOWYPDOC | 122 | # unset SHOWYPDOC |
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py index 53324b9f8b..fc708b55c3 100644 --- a/scripts/pybootchartgui/pybootchartgui/draw.py +++ b/scripts/pybootchartgui/pybootchartgui/draw.py | |||
@@ -267,11 +267,14 @@ def draw_chart(ctx, color, fill, chart_bounds, data, proc_tree, data_range): | |||
267 | # avoid divide by zero | 267 | # avoid divide by zero |
268 | if max_y == 0: | 268 | if max_y == 0: |
269 | max_y = 1.0 | 269 | max_y = 1.0 |
270 | xscale = float (chart_bounds[2]) / (max_x - x_shift) | 270 | if (max_x - x_shift): |
271 | xscale = float (chart_bounds[2]) / (max_x - x_shift) | ||
272 | else: | ||
273 | xscale = float (chart_bounds[2]) | ||
271 | # If data_range is given, scale the chart so that the value range in | 274 | # If data_range is given, scale the chart so that the value range in |
272 | # data_range matches the chart bounds exactly. | 275 | # data_range matches the chart bounds exactly. |
273 | # Otherwise, scale so that the actual data matches the chart bounds. | 276 | # Otherwise, scale so that the actual data matches the chart bounds. |
274 | if data_range: | 277 | if data_range and (data_range[1] - data_range[0]): |
275 | yscale = float(chart_bounds[3]) / (data_range[1] - data_range[0]) | 278 | yscale = float(chart_bounds[3]) / (data_range[1] - data_range[0]) |
276 | ybase = data_range[0] | 279 | ybase = data_range[0] |
277 | else: | 280 | else: |
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py index b42dac6b88..9d6787ec5a 100644 --- a/scripts/pybootchartgui/pybootchartgui/parsing.py +++ b/scripts/pybootchartgui/pybootchartgui/parsing.py | |||
@@ -128,7 +128,7 @@ class Trace: | |||
128 | def compile(self, writer): | 128 | def compile(self, writer): |
129 | 129 | ||
130 | def find_parent_id_for(pid): | 130 | def find_parent_id_for(pid): |
131 | if pid is 0: | 131 | if pid == 0: |
132 | return 0 | 132 | return 0 |
133 | ppid = self.parent_map.get(pid) | 133 | ppid = self.parent_map.get(pid) |
134 | if ppid: | 134 | if ppid: |
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py index 8c0fdb986a..8079d13750 100755 --- a/scripts/relocate_sdk.py +++ b/scripts/relocate_sdk.py | |||
@@ -97,11 +97,12 @@ def change_interpreter(elf_file_name): | |||
97 | if (len(new_dl_path) >= p_filesz): | 97 | if (len(new_dl_path) >= p_filesz): |
98 | print("ERROR: could not relocate %s, interp size = %i and %i is needed." \ | 98 | print("ERROR: could not relocate %s, interp size = %i and %i is needed." \ |
99 | % (elf_file_name, p_memsz, len(new_dl_path) + 1)) | 99 | % (elf_file_name, p_memsz, len(new_dl_path) + 1)) |
100 | break | 100 | return False |
101 | dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path)) | 101 | dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path)) |
102 | f.seek(p_offset) | 102 | f.seek(p_offset) |
103 | f.write(dl_path) | 103 | f.write(dl_path) |
104 | break | 104 | break |
105 | return True | ||
105 | 106 | ||
106 | def change_dl_sysdirs(elf_file_name): | 107 | def change_dl_sysdirs(elf_file_name): |
107 | if arch == 32: | 108 | if arch == 32: |
@@ -215,6 +216,7 @@ else: | |||
215 | 216 | ||
216 | executables_list = sys.argv[3:] | 217 | executables_list = sys.argv[3:] |
217 | 218 | ||
219 | errors = False | ||
218 | for e in executables_list: | 220 | for e in executables_list: |
219 | perms = os.stat(e)[stat.ST_MODE] | 221 | perms = os.stat(e)[stat.ST_MODE] |
220 | if os.access(e, os.W_OK|os.R_OK): | 222 | if os.access(e, os.W_OK|os.R_OK): |
@@ -240,7 +242,8 @@ for e in executables_list: | |||
240 | arch = get_arch() | 242 | arch = get_arch() |
241 | if arch: | 243 | if arch: |
242 | parse_elf_header() | 244 | parse_elf_header() |
243 | change_interpreter(e) | 245 | if not change_interpreter(e): |
246 | errors = True | ||
244 | change_dl_sysdirs(e) | 247 | change_dl_sysdirs(e) |
245 | 248 | ||
246 | """ change permissions back """ | 249 | """ change permissions back """ |
@@ -253,3 +256,6 @@ for e in executables_list: | |||
253 | print("New file size for %s is different. Looks like a relocation error!", e) | 256 | print("New file size for %s is different. Looks like a relocation error!", e) |
254 | sys.exit(-1) | 257 | sys.exit(-1) |
255 | 258 | ||
259 | if errors: | ||
260 | print("Relocation of one or more executables failed.") | ||
261 | sys.exit(-1) | ||
diff --git a/scripts/runqemu b/scripts/runqemu index cc87ea871a..4dfc0e2d38 100755 --- a/scripts/runqemu +++ b/scripts/runqemu | |||
@@ -764,7 +764,7 @@ class BaseConfig(object): | |||
764 | raise RunQemuError('BIOS not found: %s' % bios_match_name) | 764 | raise RunQemuError('BIOS not found: %s' % bios_match_name) |
765 | 765 | ||
766 | if not os.path.exists(self.bios): | 766 | if not os.path.exists(self.bios): |
767 | raise RunQemuError("KERNEL %s not found" % self.bios) | 767 | raise RunQemuError("BIOS %s not found" % self.bios) |
768 | 768 | ||
769 | 769 | ||
770 | def check_mem(self): | 770 | def check_mem(self): |
@@ -974,17 +974,14 @@ class BaseConfig(object): | |||
974 | else: | 974 | else: |
975 | self.nfs_server = '192.168.7.1' | 975 | self.nfs_server = '192.168.7.1' |
976 | 976 | ||
977 | # Figure out a new nfs_instance to allow multiple qemus running. | 977 | nfsd_port = 3048 + self.nfs_instance |
978 | ps = subprocess.check_output(("ps", "auxww")).decode('utf-8') | 978 | lockdir = "/tmp/qemu-port-locks" |
979 | pattern = '/bin/unfsd .* -i .*\.pid -e .*/exports([0-9]+) ' | 979 | self.make_lock_dir(lockdir) |
980 | all_instances = re.findall(pattern, ps, re.M) | 980 | while not self.check_free_port('localhost', nfsd_port, lockdir): |
981 | if all_instances: | 981 | self.nfs_instance += 1 |
982 | all_instances.sort(key=int) | 982 | nfsd_port += 1 |
983 | self.nfs_instance = int(all_instances.pop()) + 1 | ||
984 | |||
985 | nfsd_port = 3049 + 2 * self.nfs_instance | ||
986 | mountd_port = 3048 + 2 * self.nfs_instance | ||
987 | 983 | ||
984 | mountd_port = nfsd_port | ||
988 | # Export vars for runqemu-export-rootfs | 985 | # Export vars for runqemu-export-rootfs |
989 | export_dict = { | 986 | export_dict = { |
990 | 'NFS_INSTANCE': self.nfs_instance, | 987 | 'NFS_INSTANCE': self.nfs_instance, |
@@ -1034,6 +1031,17 @@ class BaseConfig(object): | |||
1034 | self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % ( | 1031 | self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % ( |
1035 | self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper'))) | 1032 | self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper'))) |
1036 | 1033 | ||
1034 | def make_lock_dir(self, lockdir): | ||
1035 | if not os.path.exists(lockdir): | ||
1036 | # There might be a race issue when multiple runqemu processes are | ||
1037 | # running at the same time. | ||
1038 | try: | ||
1039 | os.mkdir(lockdir) | ||
1040 | os.chmod(lockdir, 0o777) | ||
1041 | except FileExistsError: | ||
1042 | pass | ||
1043 | return | ||
1044 | |||
1037 | def setup_slirp(self): | 1045 | def setup_slirp(self): |
1038 | """Setup user networking""" | 1046 | """Setup user networking""" |
1039 | 1047 | ||
@@ -1052,14 +1060,7 @@ class BaseConfig(object): | |||
1052 | mac = 2 | 1060 | mac = 2 |
1053 | 1061 | ||
1054 | lockdir = "/tmp/qemu-port-locks" | 1062 | lockdir = "/tmp/qemu-port-locks" |
1055 | if not os.path.exists(lockdir): | 1063 | self.make_lock_dir(lockdir) |
1056 | # There might be a race issue when multiple runqemu processes are | ||
1057 | # running at the same time. | ||
1058 | try: | ||
1059 | os.mkdir(lockdir) | ||
1060 | os.chmod(lockdir, 0o777) | ||
1061 | except FileExistsError: | ||
1062 | pass | ||
1063 | 1064 | ||
1064 | # Find a free port to avoid conflicts | 1065 | # Find a free port to avoid conflicts |
1065 | for p in ports[:]: | 1066 | for p in ports[:]: |
@@ -1099,14 +1100,7 @@ class BaseConfig(object): | |||
1099 | logger.error("ip: %s" % ip) | 1100 | logger.error("ip: %s" % ip) |
1100 | raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found") | 1101 | raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found") |
1101 | 1102 | ||
1102 | if not os.path.exists(lockdir): | 1103 | self.make_lock_dir(lockdir) |
1103 | # There might be a race issue when multiple runqemu processes are | ||
1104 | # running at the same time. | ||
1105 | try: | ||
1106 | os.mkdir(lockdir) | ||
1107 | os.chmod(lockdir, 0o777) | ||
1108 | except FileExistsError: | ||
1109 | pass | ||
1110 | 1104 | ||
1111 | cmd = (ip, 'link') | 1105 | cmd = (ip, 'link') |
1112 | logger.debug('Running %s...' % str(cmd)) | 1106 | logger.debug('Running %s...' % str(cmd)) |
@@ -1328,6 +1322,8 @@ class BaseConfig(object): | |||
1328 | 1322 | ||
1329 | for ovmf in self.ovmf_bios: | 1323 | for ovmf in self.ovmf_bios: |
1330 | format = ovmf.rsplit('.', 1)[-1] | 1324 | format = ovmf.rsplit('.', 1)[-1] |
1325 | if format == "bin": | ||
1326 | format = "raw" | ||
1331 | self.qemu_opt += ' -drive if=pflash,format=%s,file=%s' % (format, ovmf) | 1327 | self.qemu_opt += ' -drive if=pflash,format=%s,file=%s' % (format, ovmf) |
1332 | 1328 | ||
1333 | self.qemu_opt += ' ' + self.qemu_opt_script | 1329 | self.qemu_opt += ' ' + self.qemu_opt_script |
@@ -1421,13 +1417,13 @@ class BaseConfig(object): | |||
1421 | logger.debug('Running %s' % str(cmd)) | 1417 | logger.debug('Running %s' % str(cmd)) |
1422 | subprocess.check_call(cmd) | 1418 | subprocess.check_call(cmd) |
1423 | self.release_taplock() | 1419 | self.release_taplock() |
1424 | self.release_portlock() | ||
1425 | 1420 | ||
1426 | if self.nfs_running: | 1421 | if self.nfs_running: |
1427 | logger.info("Shutting down the userspace NFS server...") | 1422 | logger.info("Shutting down the userspace NFS server...") |
1428 | cmd = ("runqemu-export-rootfs", "stop", self.rootfs) | 1423 | cmd = ("runqemu-export-rootfs", "stop", self.rootfs) |
1429 | logger.debug('Running %s' % str(cmd)) | 1424 | logger.debug('Running %s' % str(cmd)) |
1430 | subprocess.check_call(cmd) | 1425 | subprocess.check_call(cmd) |
1426 | self.release_portlock() | ||
1431 | 1427 | ||
1432 | if self.saved_stty: | 1428 | if self.saved_stty: |
1433 | subprocess.check_call(("stty", self.saved_stty)) | 1429 | subprocess.check_call(("stty", self.saved_stty)) |
@@ -1514,7 +1510,8 @@ def main(): | |||
1514 | 1510 | ||
1515 | def sigterm_handler(signum, frame): | 1511 | def sigterm_handler(signum, frame): |
1516 | logger.info("SIGTERM received") | 1512 | logger.info("SIGTERM received") |
1517 | os.kill(config.qemupid, signal.SIGTERM) | 1513 | if config.qemupid: |
1514 | os.kill(config.qemupid, signal.SIGTERM) | ||
1518 | config.cleanup() | 1515 | config.cleanup() |
1519 | # Deliberately ignore the return code of 'tput smam'. | 1516 | # Deliberately ignore the return code of 'tput smam'. |
1520 | subprocess.call(["tput", "smam"]) | 1517 | subprocess.call(["tput", "smam"]) |
diff --git a/scripts/verify-bashisms b/scripts/verify-bashisms index fb0cc719ea..14d8c298e9 100755 --- a/scripts/verify-bashisms +++ b/scripts/verify-bashisms | |||
@@ -100,7 +100,7 @@ if __name__=='__main__': | |||
100 | args = parser.parse_args() | 100 | args = parser.parse_args() |
101 | 101 | ||
102 | if shutil.which("checkbashisms.pl") is None: | 102 | if shutil.which("checkbashisms.pl") is None: |
103 | print("Cannot find checkbashisms.pl on $PATH, get it from https://anonscm.debian.org/cgit/collab-maint/devscripts.git/plain/scripts/checkbashisms.pl") | 103 | print("Cannot find checkbashisms.pl on $PATH, get it from https://salsa.debian.org/debian/devscripts/raw/master/scripts/checkbashisms.pl") |
104 | sys.exit(1) | 104 | sys.exit(1) |
105 | 105 | ||
106 | # The order of defining the worker function, | 106 | # The order of defining the worker function, |
diff --git a/scripts/wic b/scripts/wic index a741aed364..99a8a97ccb 100755 --- a/scripts/wic +++ b/scripts/wic | |||
@@ -22,9 +22,9 @@ import sys | |||
22 | import argparse | 22 | import argparse |
23 | import logging | 23 | import logging |
24 | import subprocess | 24 | import subprocess |
25 | import shutil | ||
25 | 26 | ||
26 | from collections import namedtuple | 27 | from collections import namedtuple |
27 | from distutils import spawn | ||
28 | 28 | ||
29 | # External modules | 29 | # External modules |
30 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | 30 | scripts_path = os.path.dirname(os.path.realpath(__file__)) |
@@ -47,7 +47,7 @@ if os.environ.get('SDKTARGETSYSROOT'): | |||
47 | break | 47 | break |
48 | sdkroot = os.path.dirname(sdkroot) | 48 | sdkroot = os.path.dirname(sdkroot) |
49 | 49 | ||
50 | bitbake_exe = spawn.find_executable('bitbake') | 50 | bitbake_exe = shutil.which('bitbake') |
51 | if bitbake_exe: | 51 | if bitbake_exe: |
52 | bitbake_path = scriptpath.add_bitbake_lib_path() | 52 | bitbake_path = scriptpath.add_bitbake_lib_path() |
53 | import bb | 53 | import bb |
@@ -206,7 +206,7 @@ def wic_create_subcommand(options, usage_str): | |||
206 | logger.info(" (Please check that the build artifacts for the machine") | 206 | logger.info(" (Please check that the build artifacts for the machine") |
207 | logger.info(" selected in local.conf actually exist and that they") | 207 | logger.info(" selected in local.conf actually exist and that they") |
208 | logger.info(" are the correct artifacts for the image (.wks file)).\n") | 208 | logger.info(" are the correct artifacts for the image (.wks file)).\n") |
209 | raise WicError("The artifact that couldn't be found was %s:\n %s", not_found, not_found_dir) | 209 | raise WicError("The artifact that couldn't be found was %s:\n %s" % (not_found, not_found_dir)) |
210 | 210 | ||
211 | krootfs_dir = options.rootfs_dir | 211 | krootfs_dir = options.rootfs_dir |
212 | if krootfs_dir is None: | 212 | if krootfs_dir is None: |
diff --git a/scripts/yocto-check-layer b/scripts/yocto-check-layer index b7c83c8b54..dd930cdddd 100755 --- a/scripts/yocto-check-layer +++ b/scripts/yocto-check-layer | |||
@@ -24,7 +24,7 @@ import scriptpath | |||
24 | scriptpath.add_oe_lib_path() | 24 | scriptpath.add_oe_lib_path() |
25 | scriptpath.add_bitbake_lib_path() | 25 | scriptpath.add_bitbake_lib_path() |
26 | 26 | ||
27 | from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_signatures, check_bblayers | 27 | from checklayer import LayerType, detect_layers, add_layers, add_layer_dependencies, get_layer_dependencies, get_signatures, check_bblayers |
28 | from oeqa.utils.commands import get_bb_vars | 28 | from oeqa.utils.commands import get_bb_vars |
29 | 29 | ||
30 | PROGNAME = 'yocto-check-layer' | 30 | PROGNAME = 'yocto-check-layer' |
@@ -51,6 +51,8 @@ def main(): | |||
51 | help='File to output log (optional)', action='store') | 51 | help='File to output log (optional)', action='store') |
52 | parser.add_argument('--dependency', nargs="+", | 52 | parser.add_argument('--dependency', nargs="+", |
53 | help='Layers to process for dependencies', action='store') | 53 | help='Layers to process for dependencies', action='store') |
54 | parser.add_argument('--no-auto-dependency', help='Disable automatic testing of dependencies', | ||
55 | action='store_true') | ||
54 | parser.add_argument('--machines', nargs="+", | 56 | parser.add_argument('--machines', nargs="+", |
55 | help='List of MACHINEs to be used during testing', action='store') | 57 | help='List of MACHINEs to be used during testing', action='store') |
56 | parser.add_argument('--additional-layers', nargs="+", | 58 | parser.add_argument('--additional-layers', nargs="+", |
@@ -121,6 +123,21 @@ def main(): | |||
121 | if not layers: | 123 | if not layers: |
122 | return 1 | 124 | return 1 |
123 | 125 | ||
126 | # Find all dependencies, and get them checked too | ||
127 | if not args.no_auto_dependency: | ||
128 | depends = [] | ||
129 | for layer in layers: | ||
130 | layer_depends = get_layer_dependencies(layer, dep_layers, logger) | ||
131 | if layer_depends: | ||
132 | for d in layer_depends: | ||
133 | if d not in depends: | ||
134 | depends.append(d) | ||
135 | |||
136 | for d in depends: | ||
137 | if d not in layers: | ||
138 | logger.info("Adding %s to the list of layers to test, as a dependency", d['name']) | ||
139 | layers.append(d) | ||
140 | |||
124 | shutil.copyfile(bblayersconf, bblayersconf + '.backup') | 141 | shutil.copyfile(bblayersconf, bblayersconf + '.backup') |
125 | def cleanup_bblayers(signum, frame): | 142 | def cleanup_bblayers(signum, frame): |
126 | shutil.copyfile(bblayersconf + '.backup', bblayersconf) | 143 | shutil.copyfile(bblayersconf + '.backup', bblayersconf) |
@@ -138,6 +155,9 @@ def main(): | |||
138 | layer['type'] == LayerType.ERROR_BSP_DISTRO: | 155 | layer['type'] == LayerType.ERROR_BSP_DISTRO: |
139 | continue | 156 | continue |
140 | 157 | ||
158 | # Reset to a clean backup copy for each run | ||
159 | shutil.copyfile(bblayersconf + '.backup', bblayersconf) | ||
160 | |||
141 | if check_bblayers(bblayersconf, layer['path'], logger): | 161 | if check_bblayers(bblayersconf, layer['path'], logger): |
142 | logger.info("%s already in %s. To capture initial signatures, layer under test should not present " | 162 | logger.info("%s already in %s. To capture initial signatures, layer under test should not present " |
143 | "in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name'])) | 163 | "in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name'])) |
@@ -149,17 +169,13 @@ def main(): | |||
149 | logger.info("Setting up for %s(%s), %s" % (layer['name'], layer['type'], | 169 | logger.info("Setting up for %s(%s), %s" % (layer['name'], layer['type'], |
150 | layer['path'])) | 170 | layer['path'])) |
151 | 171 | ||
152 | shutil.copyfile(bblayersconf + '.backup', bblayersconf) | ||
153 | |||
154 | missing_dependencies = not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) | 172 | missing_dependencies = not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) |
155 | if not missing_dependencies: | 173 | if not missing_dependencies: |
156 | for additional_layer in additional_layers: | 174 | for additional_layer in additional_layers: |
157 | if not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger): | 175 | if not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger): |
158 | missing_dependencies = True | 176 | missing_dependencies = True |
159 | break | 177 | break |
160 | if not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) or \ | 178 | if missing_dependencies: |
161 | any(map(lambda additional_layer: not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger), | ||
162 | additional_layers)): | ||
163 | logger.info('Skipping %s due to missing dependencies.' % layer['name']) | 179 | logger.info('Skipping %s due to missing dependencies.' % layer['name']) |
164 | results[layer['name']] = None | 180 | results[layer['name']] = None |
165 | results_status[layer['name']] = 'SKIPPED (Missing dependencies)' | 181 | results_status[layer['name']] = 'SKIPPED (Missing dependencies)' |