Diffstat (limited to 'scripts/contrib')

 -rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix-plot.sh     |   4
 -rwxr-xr-x  scripts/contrib/bbvars.py                     |   6
 -rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh    |  15
 -rwxr-xr-x  scripts/contrib/convert-overrides.py          | 155
 -rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py      | 145
 -rwxr-xr-x  scripts/contrib/convert-srcuri.py             |  77
 -rwxr-xr-x  scripts/contrib/convert-variable-renames.py   | 116
 -rwxr-xr-x  scripts/contrib/ddimage                       |   2
 -rwxr-xr-x  scripts/contrib/dialog-power-control          |   2
 -rwxr-xr-x  scripts/contrib/documentation-audit.sh        |   6
 -rwxr-xr-x  scripts/contrib/image-manifest                | 523
 -rwxr-xr-x  scripts/contrib/list-packageconfig-flags.py   |   2
 -rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py | 167
 -rwxr-xr-x  scripts/contrib/patchreview.py                |  71
 -rwxr-xr-x  scripts/contrib/test_build_time.sh            |   2
 -rwxr-xr-x  scripts/contrib/test_build_time_worker.sh     |   2
 -rwxr-xr-x  scripts/contrib/verify-homepage.py            |   2

 17 files changed, 1098 insertions(+), 199 deletions(-)
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
index e7bd129e9e..6672189c95 100755
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh
@@ -16,8 +16,8 @@
 
 # Setup the defaults
 DATFILE="bb-matrix.dat"
-XLABEL="BB_NUMBER_THREADS"
-YLABEL="PARALLEL_MAKE"
+XLABEL="BB\\\\_NUMBER\\\\_THREADS"
+YLABEL="PARALLEL\\\\_MAKE"
 FIELD=3
 DEF_TITLE="Elapsed Time (seconds)"
 PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
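The doubled escaping is deliberate: the labels end up inside a gnuplot script in enhanced-text mode, where a bare underscore starts a subscript, so the underscores have to survive shell quoting and still reach gnuplot escaped. A standalone sketch of the underlying gnuplot behaviour (hypothetical file names, assuming a gnuplot build with a PNG terminal):

    # With an enhanced terminal, 'BB_NUMBER_THREADS' would typeset the
    # character after each underscore as a subscript; escaping keeps the
    # underscores literal.
    cat > /tmp/label-demo.gp <<'EOF'
    set terminal png enhanced
    set output '/tmp/label-demo.png'
    set xlabel 'BB\_NUMBER\_THREADS'
    set ylabel 'PARALLEL\_MAKE'
    plot sin(x)
    EOF
    gnuplot /tmp/label-demo.gp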
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
index 090133600b..a9cdf082ab 100755
--- a/scripts/contrib/bbvars.py
+++ b/scripts/contrib/bbvars.py
@@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars):
 def collect_documented_vars(docfiles):
     ''' Walk the docfiles and collect the documented variables '''
     documented_vars = []
-    prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-")
-    var_prog = re.compile('<glossentry id=\'var-(.*)\'>')
+    prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-")
+    var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
     for d in docfiles:
         with open(d) as f:
             documented_vars += var_prog.findall(f.read())
@@ -45,7 +45,7 @@ def collect_documented_vars(docfiles):
     return documented_vars
 
 def bbvar_doctag(var, docconf):
-    prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var))
+    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
     if docconf == "":
         return "?"
 
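The change above is purely about Python's string rules: in a plain string literal, sequences like `\[` and `\s` are invalid escapes that newer interpreters flag (historically a DeprecationWarning, a SyntaxWarning in current releases), while a raw string passes the backslashes through to the regex engine untouched. A quick demonstration from the shell, assuming only that python3 is on PATH; the pattern is a made-up stand-in, not one from bbvars.py:

    # Without the r prefix, compiling the literal itself warns; -W error
    # turns the warning into a failure so the difference is visible:
    python3 -W error -c 'import re; re.compile("FOO\[doc\]"); print("ok")'
    # With the r prefix the same pattern compiles silently:
    python3 -W error -c 'import re; re.compile(r"FOO\[doc\]"); print("ok")'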
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
index fa71d4a2e9..0a85e6e708 100755
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then
     exit 1
 fi
 
-if [ -n "$email_to" ]; then
-    if ! [ -x "$(command -v phantomjs)" ]; then
-        echo "ERROR: Sending email needs phantomjs."
-        exit 1
-    fi
-    if ! [ -x "$(command -v optipng)" ]; then
-        echo "ERROR: Sending email needs optipng."
-        exit 1
-    fi
-fi
-
 # Open a file descriptor for flock and acquire lock
 LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
 if ! exec 3> "$LOCK_FILE"; then
     echo "ERROR: Unable to open lock file"
     exit 1
 fi
 if ! flock -n 3; then
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then
     if [ -n "$email_to" ]; then
         echo "Emailing test report"
         os_name=`get_os_release_var PRETTY_NAME`
-        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
     fi
 
     # Upload report files, unless we're on detached head
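Unrelated to the email change, the surviving lock code is a compact example of the standard flock idiom: the script keeps file descriptor 3 open for its entire lifetime and takes the lock non-blockingly, so a second concurrent run fails fast instead of queueing. The same pattern in isolation (hypothetical lock path):

    #!/bin/sh
    # Open fd 3 on the lock file and try to take the lock without blocking.
    exec 3> /tmp/demo.lock || exit 1
    if ! flock -n 3; then
        echo "Another instance already holds the lock" >&2
        exit 1
    fi
    # ... exclusive work here; the kernel releases the lock when fd 3 is
    # closed at process exit, so no explicit unlock is needed.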
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
new file mode 100755
index 0000000000..c69acb4095
--- /dev/null
+++ b/scripts/contrib/convert-overrides.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python3
+#
+# Conversion script to add new override syntax to existing bitbake metadata
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# To use this script on a new layer you need to list the overrides the
+# layer is known to use in the list below.
+#
+# Known constraint: Matching is 'loose' and in particular will find variable
+# and function names with "_append" and "_remove" in them. Those need to be
+# filtered out manually or in the skip list below.
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+import argparse
+
+parser = argparse.ArgumentParser(description="Convert override syntax")
+parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
+parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
+parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
+parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
+parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
+parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
+parser.add_argument("path", nargs="+", help="Paths to convert")
+
+args = parser.parse_args()
+
+# List of strings to treat as overrides
+vars = args.override
+vars += ["append", "prepend", "remove"]
+vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
+vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
+vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
+vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
+vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
+vars += ["tune-", "pn-", "forcevariable"]
+vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
+vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
+vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
+vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
+vars += ["linux-gnueabi", "eabi"]
+vars += ["virtclass-multilib", "virtclass-mcextend"]
+
+# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
+# Handles issues with arc matching arch.
+shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override
+
+# Variables which take packagenames as an override
+packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
+               "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
+               "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
+               "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
+               "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars
+
+# Expressions to skip if encountered, these are not overrides
+skips = args.skip
+skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
+skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
+skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
+skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
+skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
+skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
+skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
+skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
+skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
+
+imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
+packagevars += imagevars
+
+skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext
+
+vars_re = {}
+for exp in vars:
+    vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
+
+shortvars_re = {}
+for exp in shortvars:
+    shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")
+
+package_re = {}
+for exp in packagevars:
+    package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
+
+# Other substitutions to make
+subs = {
+    'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
+    "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
+    "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
+    "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
+    'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
+}
+
+def processfile(fn):
+    print("processing file '%s'" % fn)
+    try:
+        fh, abs_path = tempfile.mkstemp()
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                for line in old_file:
+                    skip = False
+                    for s in skips:
+                        if s in line:
+                            skip = True
+                    if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
+                        skip = False
+                    for sub in subs:
+                        if sub in line:
+                            line = line.replace(sub, subs[sub])
+                            skip = True
+                    if not skip:
+                        for pvar in packagevars:
+                            line = package_re[pvar][0].sub(package_re[pvar][1], line)
+                        for var in vars:
+                            line = vars_re[var][0].sub(vars_re[var][1], line)
+                        for shortvar in shortvars:
+                            line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
+                        if "pkg_postinst:ontarget" in line:
+                            line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
+                    new_file.write(line)
+        shutil.copymode(fn, abs_path)
+        os.remove(fn)
+        shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.9.3"
+
+for p in args.path:
+    if os.path.isfile(p):
+        processfile(p)
+    else:
+        print("processing directory '%s'" % p)
+        for root, dirs, files in os.walk(p):
+            for name in files:
+                if name == ourname:
+                    continue
+                fn = os.path.join(root, name)
+                if os.path.islink(fn):
+                    continue
+                if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
+                    continue
+                processfile(fn)
+
+print("All files processed with version %s" % ourversion)
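Per the header comment, layer-specific overrides must be supplied explicitly. A hypothetical invocation (the layer path, machine override and skip string are illustrative, not defaults of the script):

    # Teach the script about a custom machine override and one identifier
    # that must not be rewritten, then convert the layer in place:
    ./scripts/contrib/convert-overrides.py -o mymachine -s my_append_helper ../meta-mylayer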
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
new file mode 100755
index 0000000000..4e194dee3f
--- /dev/null
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python3
+#
+# Conversion script to change LICENSE entries to SPDX identifiers
+#
+# Copyright (C) 2021-2022 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+    print("Please specify a directory to run the conversion script against.")
+    sys.exit(1)
+
+license_map = {
+    "AGPL-3" : "AGPL-3.0-only",
+    "AGPL-3+" : "AGPL-3.0-or-later",
+    "AGPLv3" : "AGPL-3.0-only",
+    "AGPLv3+" : "AGPL-3.0-or-later",
+    "AGPLv3.0" : "AGPL-3.0-only",
+    "AGPLv3.0+" : "AGPL-3.0-or-later",
+    "AGPL-3.0" : "AGPL-3.0-only",
+    "AGPL-3.0+" : "AGPL-3.0-or-later",
+    "BSD-0-Clause" : "0BSD",
+    "GPL-1" : "GPL-1.0-only",
+    "GPL-1+" : "GPL-1.0-or-later",
+    "GPLv1" : "GPL-1.0-only",
+    "GPLv1+" : "GPL-1.0-or-later",
+    "GPLv1.0" : "GPL-1.0-only",
+    "GPLv1.0+" : "GPL-1.0-or-later",
+    "GPL-1.0" : "GPL-1.0-only",
+    "GPL-1.0+" : "GPL-1.0-or-later",
+    "GPL-2" : "GPL-2.0-only",
+    "GPL-2+" : "GPL-2.0-or-later",
+    "GPLv2" : "GPL-2.0-only",
+    "GPLv2+" : "GPL-2.0-or-later",
+    "GPLv2.0" : "GPL-2.0-only",
+    "GPLv2.0+" : "GPL-2.0-or-later",
+    "GPL-2.0" : "GPL-2.0-only",
+    "GPL-2.0+" : "GPL-2.0-or-later",
+    "GPL-3" : "GPL-3.0-only",
+    "GPL-3+" : "GPL-3.0-or-later",
+    "GPLv3" : "GPL-3.0-only",
+    "GPLv3+" : "GPL-3.0-or-later",
+    "GPLv3.0" : "GPL-3.0-only",
+    "GPLv3.0+" : "GPL-3.0-or-later",
+    "GPL-3.0" : "GPL-3.0-only",
+    "GPL-3.0+" : "GPL-3.0-or-later",
+    "LGPLv2" : "LGPL-2.0-only",
+    "LGPLv2+" : "LGPL-2.0-or-later",
+    "LGPLv2.0" : "LGPL-2.0-only",
+    "LGPLv2.0+" : "LGPL-2.0-or-later",
+    "LGPL-2.0" : "LGPL-2.0-only",
+    "LGPL-2.0+" : "LGPL-2.0-or-later",
+    "LGPL2.1" : "LGPL-2.1-only",
+    "LGPL2.1+" : "LGPL-2.1-or-later",
+    "LGPLv2.1" : "LGPL-2.1-only",
+    "LGPLv2.1+" : "LGPL-2.1-or-later",
+    "LGPL-2.1" : "LGPL-2.1-only",
+    "LGPL-2.1+" : "LGPL-2.1-or-later",
+    "LGPLv3" : "LGPL-3.0-only",
+    "LGPLv3+" : "LGPL-3.0-or-later",
+    "LGPL-3.0" : "LGPL-3.0-only",
+    "LGPL-3.0+" : "LGPL-3.0-or-later",
+    "MPL-1" : "MPL-1.0",
+    "MPLv1" : "MPL-1.0",
+    "MPLv1.1" : "MPL-1.1",
+    "MPLv2" : "MPL-2.0",
+    "MIT-X" : "MIT",
+    "MIT-style" : "MIT",
+    "openssl" : "OpenSSL",
+    "PSF" : "PSF-2.0",
+    "PSFv2" : "PSF-2.0",
+    "Python-2" : "Python-2.0",
+    "Apachev2" : "Apache-2.0",
+    "Apache-2" : "Apache-2.0",
+    "Artisticv1" : "Artistic-1.0",
+    "Artistic-1" : "Artistic-1.0",
+    "AFL-2" : "AFL-2.0",
+    "AFL-1" : "AFL-1.2",
+    "AFLv2" : "AFL-2.0",
+    "AFLv1" : "AFL-1.2",
+    "CDDLv1" : "CDDL-1.0",
+    "CDDL-1" : "CDDL-1.0",
+    "EPLv1.0" : "EPL-1.0",
+    "FreeType" : "FTL",
+    "Nauman" : "Naumen",
+    "tcl" : "TCL",
+    "vim" : "Vim",
+    "SGIv1" : "SGI-1",
+}
+
+def processfile(fn):
+    print("processing file '%s'" % fn)
+    try:
+        fh, abs_path = tempfile.mkstemp()
+        modified = False
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                for line in old_file:
+                    if not line.startswith("LICENSE"):
+                        new_file.write(line)
+                        continue
+                    orig = line
+                    for license in sorted(license_map, key=len, reverse=True):
+                        for ending in ['"', "'", " ", ")"]:
+                            line = line.replace(license + ending, license_map[license] + ending)
+                    if orig != line:
+                        modified = True
+                    new_file.write(line)
+        new_file.close()
+        if modified:
+            shutil.copymode(fn, abs_path)
+            os.remove(fn)
+            shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.01"
+
+if os.path.isfile(sys.argv[1]):
+    processfile(sys.argv[1])
+    sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+    print("processing directory '%s'" % targetdir)
+    for root, dirs, files in os.walk(targetdir):
+        for name in files:
+            if name == ourname:
+                continue
+            fn = os.path.join(root, name)
+            if os.path.islink(fn):
+                continue
+            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+                continue
+            processfile(fn)
+
+print("All files processed with version %s" % ourversion)
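Note that the rewrite loop visits license_map keys longest-first (sorted by length, descending), so for example "GPLv2+" is handled before the shorter "GPLv2" can match inside it, and a key only counts as a license token when followed by a quote, space or closing parenthesis. A hypothetical before/after on a recipe:

    # Given a recipe containing:  LICENSE = "GPLv2+ & MIT"
    ./scripts/contrib/convert-spdx-licenses.py ../meta-mylayer
    # the line becomes:           LICENSE = "GPL-2.0-or-later & MIT"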
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py
new file mode 100755
index 0000000000..587392334f
--- /dev/null
+++ b/scripts/contrib/convert-srcuri.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Conversion script to update SRC_URI to add branch to git urls
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+    print("Please specify a directory to run the conversion script against.")
+    sys.exit(1)
+
+def processfile(fn):
+    def matchline(line):
+        if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
+            return False
+        return True
+    print("processing file '%s'" % fn)
+    try:
+        if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
+            return
+        fh, abs_path = tempfile.mkstemp()
+        modified = False
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                for line in old_file:
+                    if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
+                        if line.endswith('"\n'):
+                            line = line.replace('"\n', ';branch=master"\n')
+                        elif re.search('\s*\\\\$', line):
+                            line = re.sub('\s*\\\\$', ';branch=master \\\\', line)
+                        modified = True
+                    if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
+                        if "protocol=git" in line:
+                            line = line.replace('protocol=git', 'protocol=https')
+                        elif line.endswith('"\n'):
+                            line = line.replace('"\n', ';protocol=https"\n')
+                        elif re.search('\s*\\\\$', line):
+                            line = re.sub('\s*\\\\$', ';protocol=https \\\\', line)
+                        modified = True
+                    new_file.write(line)
+        if modified:
+            shutil.copymode(fn, abs_path)
+            os.remove(fn)
+            shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+    processfile(sys.argv[1])
+    sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+    print("processing directory '%s'" % targetdir)
+    for root, dirs, files in os.walk(targetdir):
+        for name in files:
+            if name == ourname:
+                continue
+            fn = os.path.join(root, name)
+            if os.path.islink(fn):
+                continue
+            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+                continue
+            processfile(fn)
+
+print("All files processed with version %s" % ourversion)
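Both rules can fire on the same line, and their order matters: the branch parameter is appended first, at the end of the quoted URL, and only then is protocol=git swapped for protocol=https. A hypothetical before/after:

    # Before:  SRC_URI = "git://github.com/example/proj.git;protocol=git"
    ./scripts/contrib/convert-srcuri.py ../meta-mylayer
    # After:   SRC_URI = "git://github.com/example/proj.git;protocol=https;branch=master"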
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
new file mode 100755
index 0000000000..eded90ca61
--- /dev/null
+++ b/scripts/contrib/convert-variable-renames.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+#
+# Conversion script to rename variables to versions with improved terminology.
+# Also highlights potentially problematic language and removed variables.
+#
+# Copyright (C) 2021 Richard Purdie
+# Copyright (C) 2022 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+    print("Please specify a directory to run the conversion script against.")
+    sys.exit(1)
+
+renames = {
+    "BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
+    "BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
+    "BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
+    "BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
+    "BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
+    "BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
+    "CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
+    "CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
+    "MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
+    "PNBLACKLIST" : "SKIP_RECIPE",
+    "SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
+    "SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
+    "SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
+    "SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
+    "SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
+    "UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
+    "ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
+    "ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
+    "ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
+    "ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
+    "ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
+    "LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
+}
+
+removed_list = [
+    "BB_STAMP_WHITELIST",
+    "BB_STAMP_POLICY",
+    "INHERIT_BLACKLIST",
+    "TUNEABI_WHITELIST",
+]
+
+context_check_list = [
+    "blacklist",
+    "whitelist",
+    "abort",
+]
+
+def processfile(fn):
+
+    print("processing file '%s'" % fn)
+    try:
+        fh, abs_path = tempfile.mkstemp()
+        modified = False
+        with os.fdopen(fh, 'w') as new_file:
+            with open(fn, "r") as old_file:
+                lineno = 0
+                for line in old_file:
+                    lineno += 1
+                    if not line or "BB_RENAMED_VARIABLE" in line:
+                        continue
+                    # Do the renames
+                    for old_name, new_name in renames.items():
+                        if old_name in line:
+                            line = line.replace(old_name, new_name)
+                            modified = True
+                    # Find removed names
+                    for removed_name in removed_list:
+                        if removed_name in line:
+                            print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
+                    for check_word in context_check_list:
+                        if re.search(check_word, line, re.IGNORECASE):
+                            print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
+                    new_file.write(line)
+        new_file.close()
+        if modified:
+            print("*** Modified file '%s'" % (fn))
+            shutil.copymode(fn, abs_path)
+            os.remove(fn)
+            shutil.move(abs_path, fn)
+    except UnicodeDecodeError:
+        pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+    processfile(sys.argv[1])
+    sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+    print("processing directory '%s'" % targetdir)
+    for root, dirs, files in os.walk(targetdir):
+        for name in files:
+            if name == ourname:
+                continue
+            fn = os.path.join(root, name)
+            if os.path.islink(fn):
+                continue
+            if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+                continue
+            processfile(fn)
+
+print("All files processed with version %s" % ourversion)
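Unlike the removed and context-checked words, which are only reported for manual follow-up, the entries in the renames table are rewritten in place. A hypothetical run over a layer:

    # Rewrites e.g. LICENSE_FLAGS_WHITELIST -> LICENSE_FLAGS_ACCEPTED in place,
    # and prints "needs further work" lines for matches on removed variables
    # or on the words "blacklist", "whitelist" and "abort":
    ./scripts/contrib/convert-variable-renames.py ../meta-mylayer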
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
index 7f2ad112a6..70eee8ebea 100755
--- a/scripts/contrib/ddimage
+++ b/scripts/contrib/ddimage
@@ -1,5 +1,7 @@
 #!/bin/sh
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
index ad6070c369..82c84baa1d 100755
--- a/scripts/contrib/dialog-power-control
+++ b/scripts/contrib/dialog-power-control
@@ -1,5 +1,7 @@
 #!/bin/sh
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # Simple script to show a manual power prompt for when you want to use
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index 1191f57a8e..7197a2fcea 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # Perform an audit of which packages provide documentation and which
@@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then
 fi
 
 echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
-echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or "
-echo "          you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\""
+echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
+echo "          you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
 
 for pkg in `bitbake -s | awk '{ print \$1 }'`; do
     if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
new file mode 100755
index 0000000000..4d65a99258
--- /dev/null
+++ b/scripts/contrib/image-manifest
@@ -0,0 +1,523 @@
+#!/usr/bin/env python3
+
+# Script to extract information from image manifests
+#
+# Copyright (C) 2018 Intel Corporation
+# Copyright (C) 2021 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import sys
+import os
+import argparse
+import logging
+import json
+import shutil
+import tempfile
+import tarfile
+from collections import OrderedDict
+
+scripts_path = os.path.dirname(__file__)
+lib_path = scripts_path + '/../lib'
+sys.path = sys.path + [lib_path]
+
+import scriptutils
+logger = scriptutils.logger_create(os.path.basename(__file__))
+
+import argparse_oe
+import scriptpath
+bitbakepath = scriptpath.add_bitbake_lib_path()
+if not bitbakepath:
+    logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+    sys.exit(1)
+logger.debug('Using standard bitbake path %s' % bitbakepath)
+scriptpath.add_oe_lib_path()
+
+import bb.tinfoil
+import bb.utils
+import oe.utils
+import oe.recipeutils
+
+def get_pkg_list(manifest):
+    pkglist = []
+    with open(manifest, 'r') as f:
+        for line in f:
+            linesplit = line.split()
+            if len(linesplit) == 3:
+                # manifest file
+                pkglist.append(linesplit[0])
+            elif len(linesplit) == 1:
+                # build dependency file
+                pkglist.append(linesplit[0])
+    return sorted(pkglist)
+
+def list_packages(args):
+    pkglist = get_pkg_list(args.manifest)
+    for pkg in pkglist:
+        print('%s' % pkg)
+
+def pkg2recipe(tinfoil, pkg):
+    if "-native" in pkg:
+        logger.info('skipping %s' % pkg)
+        return None
+
+    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
+    pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
+    logger.debug('pkgdatafile %s' % pkgdatafile)
+    try:
+        f = open(pkgdatafile, 'r')
+        for line in f:
+            if line.startswith('PN:'):
+                recipe = line.split(':', 1)[1].strip()
+                return recipe
+    except Exception:
+        logger.warning('%s is missing' % pkgdatafile)
+    return None
+
+def get_recipe_list(manifest, tinfoil):
+    pkglist = get_pkg_list(manifest)
+    recipelist = []
+    for pkg in pkglist:
+        recipe = pkg2recipe(tinfoil,pkg)
+        if recipe:
+            if not recipe in recipelist:
+                recipelist.append(recipe)
+
+    return sorted(recipelist)
+
+def list_recipes(args):
+    import bb.tinfoil
+    with bb.tinfoil.Tinfoil() as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+        tinfoil.prepare(config_only=True)
+        recipelist = get_recipe_list(args.manifest, tinfoil)
+        for recipe in sorted(recipelist):
+            print('%s' % recipe)
+
+def list_layers(args):
+
+    def find_git_repo(pth):
+        checkpth = pth
+        while checkpth != os.sep:
+            if os.path.exists(os.path.join(checkpth, '.git')):
+                return checkpth
+            checkpth = os.path.dirname(checkpth)
+        return None
+
+    def get_git_remote_branch(repodir):
+        try:
+            stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
+        except bb.process.ExecutionError as e:
+            stdout = None
+        if stdout:
+            return stdout.strip()
+        else:
+            return None
+
+    def get_git_head_commit(repodir):
+        try:
+            stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
+        except bb.process.ExecutionError as e:
+            stdout = None
+        if stdout:
+            return stdout.strip()
+        else:
+            return None
+
+    def get_git_repo_url(repodir, remote='origin'):
+        import bb.process
+        # Try to get upstream repo location from origin remote
+        try:
+            stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
+        except bb.process.ExecutionError as e:
+            stdout = None
+        if stdout:
+            for line in stdout.splitlines():
+                splitline = line.split()
+                if len(splitline) > 1:
+                    if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
+                        return splitline[1]
+        return None
+
+    with bb.tinfoil.Tinfoil() as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+        tinfoil.prepare(config_only=False)
+        layers = OrderedDict()
+        for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
+            layerdata = OrderedDict()
+            layername = os.path.basename(layerdir)
+            logger.debug('layername %s, layerdir %s' % (layername, layerdir))
+            if layername in layers:
+                logger.warning('layername %s is not unique in configuration' % layername)
+                layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
+                logger.debug('trying layername %s' % layername)
+                if layername in layers:
+                    logger.error('Layer name %s is not unique in configuration' % layername)
+                    sys.exit(2)
+            repodir = find_git_repo(layerdir)
+            if repodir:
+                remotebranch = get_git_remote_branch(repodir)
+                remote = 'origin'
+                if remotebranch and '/' in remotebranch:
+                    rbsplit = remotebranch.split('/', 1)
+                    layerdata['actual_branch'] = rbsplit[1]
+                    remote = rbsplit[0]
+                layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
+                if os.path.abspath(repodir) != os.path.abspath(layerdir):
+                    layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
+                commit = get_git_head_commit(repodir)
+                if commit:
+                    layerdata['vcs_commit'] = commit
+            layers[layername] = layerdata
+
+    json.dump(layers, args.output, indent=2)
+
+def get_recipe(args):
+    with bb.tinfoil.Tinfoil() as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+        tinfoil.prepare(config_only=True)
+
+        recipe = pkg2recipe(tinfoil, args.package)
+        print(' %s package provided by %s' % (args.package, recipe))
+
+def pkg_dependencies(args):
+    def get_recipe_info(tinfoil, recipe):
+        try:
+            info = tinfoil.get_recipe_info(recipe)
+        except Exception:
+            logger.error('Failed to get recipe info for: %s' % recipe)
+            sys.exit(1)
+        if not info:
+            logger.warning('No recipe info found for: %s' % recipe)
+            sys.exit(1)
+        append_files = tinfoil.get_file_appends(info.fn)
+        appends = True
+        data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
+        data.pn = info.pn
+        data.pv = info.pv
+        return data
+
+    def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
+        spaces = ' ' * order
+        data = recipe_info[rn]
+        if args.native:
+            logger.debug('%s- %s' % (spaces, data.pn))
+        elif "-native" not in data.pn:
+            if "cross" not in data.pn:
+                logger.debug('%s- %s' % (spaces, data.pn))
+
+        depends = []
+        for dep in data.depends:
+            if dep not in assume_provided:
+                depends.append(dep)
+
+        # First find all dependencies not in package list.
+        for dep in depends:
+            if dep not in packages:
+                packages.append(dep)
+                dep_data = get_recipe_info(tinfoil, dep)
+                # Do this once now to reduce the number of bitbake calls.
+                dep_data.depends = dep_data.getVar('DEPENDS').split()
+                recipe_info[dep] = dep_data
+
+        # Then recursively analyze all of the dependencies for the current recipe.
+        for dep in depends:
+            find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)
+
+    with bb.tinfoil.Tinfoil() as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+        tinfoil.prepare()
+
+        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
+        logger.debug('assumed provided:')
+        for ap in sorted(assume_provided):
+            logger.debug(' - %s' % ap)
+
+        recipe = pkg2recipe(tinfoil, args.package)
+        data = get_recipe_info(tinfoil, recipe)
+        data.depends = []
+        depends = data.getVar('DEPENDS').split()
+        for dep in depends:
+            if dep not in assume_provided:
+                data.depends.append(dep)
+
+        recipe_info = dict([(recipe, data)])
+        packages = []
+        find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)
+
+        print('\nThe following packages are required to build %s' % recipe)
+        for p in sorted(packages):
+            data = recipe_info[p]
+            if "-native" not in data.pn:
+                if "cross" not in data.pn:
+                    print("  %s (%s)" % (data.pn,p))
+
+        if args.native:
+            print('\nThe following native packages are required to build %s' % recipe)
+            for p in sorted(packages):
+                data = recipe_info[p]
+                if "-native" in data.pn:
+                    print("  %s(%s)" % (data.pn,p))
+                if "cross" in data.pn:
+                    print("  %s(%s)" % (data.pn,p))
+
+def default_config():
+    vlist = OrderedDict()
+    vlist['PV'] = 'yes'
+    vlist['SUMMARY'] = 'no'
+    vlist['DESCRIPTION'] = 'no'
+    vlist['SECTION'] = 'no'
+    vlist['LICENSE'] = 'yes'
+    vlist['HOMEPAGE'] = 'no'
+    vlist['BUGTRACKER'] = 'no'
+    vlist['PROVIDES'] = 'no'
+    vlist['BBCLASSEXTEND'] = 'no'
+    vlist['DEPENDS'] = 'no'
+    vlist['PACKAGECONFIG'] = 'no'
+    vlist['SRC_URI'] = 'yes'
+    vlist['SRCREV'] = 'yes'
+    vlist['EXTRA_OECONF'] = 'no'
+    vlist['EXTRA_OESCONS'] = 'no'
+    vlist['EXTRA_OECMAKE'] = 'no'
+    vlist['EXTRA_OEMESON'] = 'no'
+
+    clist = OrderedDict()
+    clist['variables'] = vlist
+    clist['filepath'] = 'no'
+    clist['sha256sum'] = 'no'
+    clist['layerdir'] = 'no'
+    clist['layer'] = 'no'
+    clist['inherits'] = 'no'
+    clist['source_urls'] = 'no'
+    clist['packageconfig_opts'] = 'no'
+    clist['patches'] = 'no'
+    clist['packagedir'] = 'no'
+    return clist
+
+def dump_config(args):
+    config = default_config()
+    f = open('default_config.json', 'w')
+    json.dump(config, f, indent=2)
+    logger.info('Default config list dumped to default_config.json')
+
+def export_manifest_info(args):
+
+    def handle_value(value):
+        if value:
+            return oe.utils.squashspaces(value)
+        else:
+            return value
+
+    if args.config:
+        logger.debug('config: %s' % args.config)
+        f = open(args.config, 'r')
+        config = json.load(f, object_pairs_hook=OrderedDict)
+    else:
+        config = default_config()
+    if logger.isEnabledFor(logging.DEBUG):
+        print('Configuration:')
+        json.dump(config, sys.stdout, indent=2)
+        print('')
+
+    tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
+    logger.debug('tmp dir: %s' % tmpoutdir)
+
+    # export manifest
+    shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest"))
+
+    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+        tinfoil.logger.setLevel(logger.getEffectiveLevel())
+        tinfoil.prepare(config_only=False)
+
+        pkglist = get_pkg_list(args.manifest)
+        # export pkg list
+        f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
+        for pkg in pkglist:
+            f.write('%s\n' % pkg)
+        f.close()
+
+        recipelist = []
+        for pkg in pkglist:
+            recipe = pkg2recipe(tinfoil,pkg)
+            if recipe:
+                if not recipe in recipelist:
+                    recipelist.append(recipe)
+        recipelist.sort()
+        # export recipe list
+        f = open(os.path.join(tmpoutdir, "recipes"), 'w')
+        for recipe in recipelist:
+            f.write('%s\n' % recipe)
+        f.close()
+
+        try:
+            rvalues = OrderedDict()
+            for pn in sorted(recipelist):
+                logger.debug('Package: %s' % pn)
+                rd = tinfoil.parse_recipe(pn)
+
+                rvalues[pn] = OrderedDict()
+
+                for varname in config['variables']:
+                    if config['variables'][varname] == 'yes':
+                        rvalues[pn][varname] = handle_value(rd.getVar(varname))
+
+                fpth = rd.getVar('FILE')
+                layerdir = oe.recipeutils.find_layerdir(fpth)
+                if config['filepath'] == 'yes':
+                    rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
+                if config['sha256sum'] == 'yes':
+                    rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)
+
+                if config['layerdir'] == 'yes':
+                    rvalues[pn]['layerdir'] = layerdir
+
+                if config['layer'] == 'yes':
+                    rvalues[pn]['layer'] = os.path.basename(layerdir)
+
+                if config['inherits'] == 'yes':
+                    gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
+                    lr = set(rd.getVar("__inherit_cache") or [])
+                    rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})
+
+                if config['source_urls'] == 'yes':
+                    rvalues[pn]['source_urls'] = []
+                    for url in (rd.getVar('SRC_URI') or '').split():
+                        if not url.startswith('file://'):
+                            url = url.split(';')[0]
+                            rvalues[pn]['source_urls'].append(url)
+
+                if config['packageconfig_opts'] == 'yes':
+                    rvalues[pn]['packageconfig_opts'] = OrderedDict()
+                    for key in rd.getVarFlags('PACKAGECONFIG').keys():
+                        if key == 'doc':
+                            continue
+                        rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
+
+                if config['patches'] == 'yes':
+                    patches = oe.recipeutils.get_recipe_patches(rd)
+                    rvalues[pn]['patches'] = []
+                    if patches:
+                        recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
+                        bb.utils.mkdirhier(recipeoutdir)
+                        for patch in patches:
+                            # Patches may be in other layers too
+                            patchlayerdir = oe.recipeutils.find_layerdir(patch)
+                            # patchlayerdir will be None for remote patches, which we ignore
+                            # (since currently they are considered as part of sources)
+                            if patchlayerdir:
+                                rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
+                                shutil.copy(patch, recipeoutdir)
+
+                if config['packagedir'] == 'yes':
+                    pn_dir = os.path.join(tmpoutdir, pn)
+                    bb.utils.mkdirhier(pn_dir)
+                    f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
+                    json.dump(rvalues[pn], f, indent=2)
+                    f.close()
+
+            with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
+                json.dump(rvalues, f, indent=2)
+
+            if args.output:
+                outname = os.path.basename(args.output)
+            else:
+                outname = os.path.splitext(os.path.basename(args.manifest))[0]
+            if outname.endswith('.tar.gz'):
+                outname = outname[:-7]
+            elif outname.endswith('.tgz'):
+                outname = outname[:-4]
+
+            tarfn = outname
+            if tarfn.endswith(os.sep):
+                tarfn = tarfn[:-1]
+            if not tarfn.endswith(('.tar.gz', '.tgz')):
+                tarfn += '.tar.gz'
+            with open(tarfn, 'wb') as f:
+                with tarfile.open(None, "w:gz", f) as tar:
+                    tar.add(tmpoutdir, outname)
+        finally:
+            shutil.rmtree(tmpoutdir)
+
+
+def main():
+    parser = argparse_oe.ArgumentParser(description="Image manifest utility",
+                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
+    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
+    subparsers.required = True
+
+    # get recipe info
+    parser_get_recipes = subparsers.add_parser('recipe-info',
+                                               help='Get recipe info',
+                                               description='Get recipe information for a package')
+    parser_get_recipes.add_argument('package', help='Package name')
+    parser_get_recipes.set_defaults(func=get_recipe)
+
+    # list runtime dependencies
+    parser_pkg_dep = subparsers.add_parser('list-depends',
+                                           help='List dependencies',
+                                           description='List dependencies required to build the package')
+    parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
+    parser_pkg_dep.add_argument('package', help='Package name')
+    parser_pkg_dep.set_defaults(func=pkg_dependencies)
+
+    # list recipes
+    parser_recipes = subparsers.add_parser('list-recipes',
+                                           help='List recipes producing packages within an image',
+                                           description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
+    parser_recipes.add_argument('manifest', help='Manifest file')
+    parser_recipes.set_defaults(func=list_recipes)
+
+    # list packages
+    parser_packages = subparsers.add_parser('list-packages',
+                                            help='List packages within an image',
+                                            description='Lists packages that went into an image, using the manifest')
+    parser_packages.add_argument('manifest', help='Manifest file')
+    parser_packages.set_defaults(func=list_packages)
+
+    # list layers
+    parser_layers = subparsers.add_parser('list-layers',
+                                          help='List included layers',
+                                          description='Lists included layers')
+    parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
+                               default=sys.stdout, type=argparse.FileType('w'))
+    parser_layers.set_defaults(func=list_layers)
+
+    # dump default configuration file
+    parser_dconfig = subparsers.add_parser('dump-config',
+                                           help='Dump default config',
+                                           description='Dump default config to default_config.json')
+    parser_dconfig.set_defaults(func=dump_config)
+
+    # export recipe info for packages in manifest
+    parser_export = subparsers.add_parser('manifest-info',
+                                          help='Export recipe info for a manifest',
+                                          description='Export recipe information using the manifest')
+    parser_export.add_argument('-c', '--config', help='load config from json file')
+    parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
+    parser_export.add_argument('manifest', help='Manifest file')
+    parser_export.set_defaults(func=export_manifest_info)
+
+    args = parser.parse_args()
+
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+        logger.debug("Debug Enabled")
+    elif args.quiet:
+        logger.setLevel(logging.ERROR)
+
+    ret = args.func(args)
+
+    return ret
+
+
+if __name__ == "__main__":
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+        traceback.print_exc()
+    sys.exit(ret)
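The subcommands wired up in main() are independent, so a typical session strings a few of them together. A hypothetical example against a built image (the manifest path is illustrative; list-recipes and manifest-info need a built tree because they resolve packages through pkgdata via tinfoil):

    MANIFEST=tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.manifest
    ./scripts/contrib/image-manifest list-packages "$MANIFEST"
    ./scripts/contrib/image-manifest list-recipes "$MANIFEST"
    # Dump the default export configuration, tweak it, then export a tarball
    # of recipe information for everything in the image:
    ./scripts/contrib/image-manifest dump-config
    ./scripts/contrib/image-manifest manifest-info -c default_config.json -o image-info.tar.gz "$MANIFEST"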
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py
index d6de4dc84d..bb288e9099 100755
--- a/scripts/contrib/list-packageconfig-flags.py
+++ b/scripts/contrib/list-packageconfig-flags.py
@@ -33,7 +33,7 @@ import bb.tinfoil
 def get_fnlist(bbhandler, pkg_pn, preferred):
     ''' Get all recipe file names '''
     if preferred:
-        (latest_versions, preferred_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
+        (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
 
     fn_list = []
     for pn in sorted(pkg_pn):
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py index de3862c897..7192113c28 100755 --- a/scripts/contrib/oe-build-perf-report-email.py +++ b/scripts/contrib/oe-build-perf-report-email.py | |||
@@ -19,8 +19,6 @@ import socket | |||
19 | import subprocess | 19 | import subprocess |
20 | import sys | 20 | import sys |
21 | import tempfile | 21 | import tempfile |
22 | from email.mime.image import MIMEImage | ||
23 | from email.mime.multipart import MIMEMultipart | ||
24 | from email.mime.text import MIMEText | 22 | from email.mime.text import MIMEText |
25 | 23 | ||
26 | 24 | ||
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") | |||
29 | log = logging.getLogger('oe-build-perf-report') | 27 | log = logging.getLogger('oe-build-perf-report') |
30 | 28 | ||
31 | 29 | ||
32 | # Find js scaper script | ||
33 | SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf', | ||
34 | 'scrape-html-report.js') | ||
35 | if not os.path.isfile(SCRAPE_JS): | ||
36 | log.error("Unableto find oe-build-perf-report-scrape.js") | ||
37 | sys.exit(1) | ||
38 | |||
39 | |||
40 | class ReportError(Exception): | ||
41 | """Local errors""" | ||
42 | pass | ||
43 | |||
44 | |||
45 | def check_utils(): | ||
46 | """Check that all needed utils are installed in the system""" | ||
47 | missing = [] | ||
48 | for cmd in ('phantomjs', 'optipng'): | ||
49 | if not shutil.which(cmd): | ||
50 | missing.append(cmd) | ||
51 | if missing: | ||
52 | log.error("The following tools are missing: %s", ' '.join(missing)) | ||
53 | sys.exit(1) | ||
54 | |||
55 | |||
56 | def parse_args(argv): | 30 | def parse_args(argv): |
57 | """Parse command line arguments""" | 31 | """Parse command line arguments""" |
58 | description = """Email build perf test report""" | 32 | description = """Email build perf test report""" |
@@ -77,137 +51,19 @@ def parse_args(argv): | |||
77 | "the email parts") | 51 | "the email parts") |
78 | parser.add_argument('--text', | 52 | parser.add_argument('--text', |
79 | help="Plain text message") | 53 | help="Plain text message") |
80 | parser.add_argument('--html', | ||
81 | help="HTML peport generated by oe-build-perf-report") | ||
82 | parser.add_argument('--phantomjs-args', action='append', | ||
83 | help="Extra command line arguments passed to PhantomJS") | ||
84 | 54 | ||
85 | args = parser.parse_args(argv) | 55 | args = parser.parse_args(argv) |
86 | 56 | ||
87 | if not args.html and not args.text: | 57 | if not args.text: |
88 | parser.error("Please specify --html and/or --text") | 58 | parser.error("Please specify --text") |
89 | 59 | ||
90 | return args | 60 | return args |
91 | 61 | ||
92 | 62 | ||
93 | def decode_png(infile, outfile): | 63 | def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]): |
94 | """Parse/decode/optimize png data from a html element""" | ||
95 | with open(infile) as f: | ||
96 | raw_data = f.read() | ||
97 | |||
98 | # Grab raw base64 data | ||
99 | b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1) | ||
100 | b64_data = re.sub('">.+$', '', b64_data, 1) | ||
101 | |||
102 | # Replace file with proper decoded png | ||
103 | with open(outfile, 'wb') as f: | ||
104 | f.write(base64.b64decode(b64_data)) | ||
105 | |||
106 | subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT) | ||
107 | |||
108 | |||
109 | def mangle_html_report(infile, outfile, pngs): | ||
110 | """Mangle html file into a email compatible format""" | ||
111 | paste = True | ||
112 | png_dir = os.path.dirname(outfile) | ||
113 | with open(infile) as f_in: | ||
114 | with open(outfile, 'w') as f_out: | ||
115 | for line in f_in.readlines(): | ||
116 | stripped = line.strip() | ||
117 | # Strip out scripts | ||
118 | if stripped == '<!--START-OF-SCRIPTS-->': | ||
119 | paste = False | ||
120 | elif stripped == '<!--END-OF-SCRIPTS-->': | ||
121 | paste = True | ||
122 | elif paste: | ||
123 | if re.match('^.+href="data:image/png;base64', stripped): | ||
124 | # Strip out encoded pngs (as they're huge in size) | ||
125 | continue | ||
126 | elif 'www.gstatic.com' in stripped: | ||
127 | # HACK: drop references to external static pages | ||
128 | continue | ||
129 | |||
130 | # Replace charts with <img> elements | ||
131 | match = re.match('<div id="(?P<id>\w+)"', stripped) | ||
132 | if match and match.group('id') in pngs: | ||
133 | f_out.write('<img src="cid:{}"\n'.format(match.group('id'))) | ||
134 | else: | ||
135 | f_out.write(line) | ||
136 | |||
137 | |||
138 | def scrape_html_report(report, outdir, phantomjs_extra_args=None): | ||
139 | """Scrape html report into a format sendable by email""" | ||
140 | tmpdir = tempfile.mkdtemp(dir='.') | ||
141 | log.debug("Using tmpdir %s for phantomjs output", tmpdir) | ||
142 | |||
143 | if not os.path.isdir(outdir): | ||
144 | os.mkdir(outdir) | ||
145 | if os.path.splitext(report)[1] not in ('.html', '.htm'): | ||
146 | raise ReportError("Invalid file extension for report, needs to be " | ||
147 | "'.html' or '.htm'") | ||
148 | |||
149 | try: | ||
150 | log.info("Scraping HTML report with PhangomJS") | ||
151 | extra_args = phantomjs_extra_args if phantomjs_extra_args else [] | ||
152 | subprocess.check_output(['phantomjs', '--debug=true'] + extra_args + | ||
153 | [SCRAPE_JS, report, tmpdir], | ||
154 | stderr=subprocess.STDOUT) | ||
155 | |||
156 | pngs = [] | ||
157 | images = [] | ||
158 | for fname in os.listdir(tmpdir): | ||
159 | base, ext = os.path.splitext(fname) | ||
160 | if ext == '.png': | ||
161 | log.debug("Decoding %s", fname) | ||
162 | decode_png(os.path.join(tmpdir, fname), | ||
163 | os.path.join(outdir, fname)) | ||
164 | pngs.append(base) | ||
165 | images.append(fname) | ||
166 | elif ext in ('.html', '.htm'): | ||
167 | report_file = fname | ||
168 | else: | ||
169 | log.warning("Unknown file extension: '%s'", ext) | ||
170 | #shutil.move(os.path.join(tmpdir, fname), outdir) | ||
171 | |||
172 | log.debug("Mangling html report file %s", report_file) | ||
173 | mangle_html_report(os.path.join(tmpdir, report_file), | ||
174 | os.path.join(outdir, report_file), pngs) | ||
175 | return (os.path.join(outdir, report_file), | ||
176 | [os.path.join(outdir, i) for i in images]) | ||
177 | finally: | ||
178 | shutil.rmtree(tmpdir) | ||
179 | |||
180 | def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[], | ||
181 | blind_copy=[]): | ||
182 | """Send email""" | ||
183 | # Generate email message | 64 | # Generate email message |
184 | text_msg = html_msg = None | 65 | with open(text_fn) as f: |
185 | if text_fn: | 66 | msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain') |
186 | with open(text_fn) as f: | ||
187 | text_msg = MIMEText("Yocto build performance test report.\n" + | ||
188 | f.read(), 'plain') | ||
189 | if html_fn: | ||
190 | html_msg = msg = MIMEMultipart('related') | ||
191 | with open(html_fn) as f: | ||
192 | html_msg.attach(MIMEText(f.read(), 'html')) | ||
193 | for img_fn in image_fns: | ||
194 | # Expect that content id is same as the filename | ||
195 | cid = os.path.splitext(os.path.basename(img_fn))[0] | ||
196 | with open(img_fn, 'rb') as f: | ||
197 | image_msg = MIMEImage(f.read()) | ||
198 | image_msg['Content-ID'] = '<{}>'.format(cid) | ||
199 | html_msg.attach(image_msg) | ||
200 | |||
201 | if text_msg and html_msg: | ||
202 | msg = MIMEMultipart('alternative') | ||
203 | msg.attach(text_msg) | ||
204 | msg.attach(html_msg) | ||
205 | elif text_msg: | ||
206 | msg = text_msg | ||
207 | elif html_msg: | ||
208 | msg = html_msg | ||
209 | else: | ||
210 | raise ReportError("Neither plain text nor html body specified") | ||
211 | 67 | ||
212 | pw_data = pwd.getpwuid(os.getuid()) | 68 | pw_data = pwd.getpwuid(os.getuid()) |
213 | full_name = pw_data.pw_gecos.split(',')[0] | 69 | full_name = pw_data.pw_gecos.split(',')[0] |
@@ -234,8 +90,6 @@ def main(argv=None): | |||
234 | if args.debug: | 90 | if args.debug: |
235 | log.setLevel(logging.DEBUG) | 91 | log.setLevel(logging.DEBUG) |
236 | 92 | ||
237 | check_utils() | ||
238 | |||
239 | if args.outdir: | 93 | if args.outdir: |
240 | outdir = args.outdir | 94 | outdir = args.outdir |
241 | if not os.path.exists(outdir): | 95 | if not os.path.exists(outdir): |
@@ -245,25 +99,16 @@ def main(argv=None): | |||
245 | 99 | ||
246 | try: | 100 | try: |
247 | log.debug("Storing email parts in %s", outdir) | 101 | log.debug("Storing email parts in %s", outdir) |
248 | html_report = images = None | ||
249 | if args.html: | ||
250 | html_report, images = scrape_html_report(args.html, outdir, | ||
251 | args.phantomjs_args) | ||
252 | |||
253 | if args.to: | 102 | if args.to: |
254 | log.info("Sending email to %s", ', '.join(args.to)) | 103 | log.info("Sending email to %s", ', '.join(args.to)) |
255 | if args.cc: | 104 | if args.cc: |
256 | log.info("Copying to %s", ', '.join(args.cc)) | 105 | log.info("Copying to %s", ', '.join(args.cc)) |
257 | if args.bcc: | 106 | if args.bcc: |
258 | log.info("Blind copying to %s", ', '.join(args.bcc)) | 107 | log.info("Blind copying to %s", ', '.join(args.bcc)) |
259 | send_email(args.text, html_report, images, args.subject, | 108 | send_email(args.text, args.subject, args.to, args.cc, args.bcc) |
260 | args.to, args.cc, args.bcc) | ||
261 | except subprocess.CalledProcessError as err: | 109 | except subprocess.CalledProcessError as err: |
262 | log.error("%s, with output:\n%s", str(err), err.output.decode()) | 110 | log.error("%s, with output:\n%s", str(err), err.output.decode()) |
263 | return 1 | 111 | return 1 |
264 | except ReportError as err: | ||
265 | log.error(err) | ||
266 | return 1 | ||
267 | finally: | 112 | finally: |
268 | if not args.outdir: | 113 | if not args.outdir: |
269 | log.debug("Wiping %s", outdir) | 114 | log.debug("Wiping %s", outdir) |
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py index 62c509f51c..bceae06561 100755 --- a/scripts/contrib/patchreview.py +++ b/scripts/contrib/patchreview.py | |||
@@ -1,14 +1,25 @@ | |||
1 | #! /usr/bin/env python3 | 1 | #! /usr/bin/env python3 |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | 7 | ||
8 | import argparse | ||
9 | import collections | ||
10 | import json | ||
11 | import os | ||
12 | import os.path | ||
13 | import pathlib | ||
14 | import re | ||
15 | import subprocess | ||
16 | |||
6 | # TODO | 17 | # TODO |
7 | # - option to just list all broken files | 18 | # - option to just list all broken files |
8 | # - test suite | 19 | # - test suite |
9 | # - validate signed-off-by | 20 | # - validate signed-off-by |
10 | 21 | ||
11 | status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied") | 22 | status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream") |
12 | 23 | ||
13 | class Result: | 24 | class Result: |
14 | # Whether the patch has an Upstream-Status or not | 25 | # Whether the patch has an Upstream-Status or not |
@@ -33,20 +44,18 @@ def blame_patch(patch): | |||
33 | From a patch filename, return a list of "commit summary (author name <author | 44 | From a patch filename, return a list of "commit summary (author name <author |
34 | email>)" strings representing the history. | 45 | email>)" strings representing the history. |
35 | """ | 46 | """ |
36 | import subprocess | ||
37 | return subprocess.check_output(("git", "log", | 47 | return subprocess.check_output(("git", "log", |
38 | "--follow", "--find-renames", "--diff-filter=A", | 48 | "--follow", "--find-renames", "--diff-filter=A", |
39 | "--format=%s (%aN <%aE>)", | 49 | "--format=%s (%aN <%aE>)", |
40 | "--", patch)).decode("utf-8").splitlines() | 50 | "--", patch)).decode("utf-8").splitlines() |
41 | 51 | ||
42 | def patchreview(path, patches): | 52 | def patchreview(patches): |
43 | import re, os.path | ||
44 | 53 | ||
45 | # General pattern: start of line, optional whitespace, tag with optional | 54 | # General pattern: start of line, optional whitespace, tag with optional |
46 | # hyphen or spaces, maybe a colon, some whitespace, then the value, all case | 55 | # hyphen or spaces, maybe a colon, some whitespace, then the value, all case |
47 | # insensitive. | 56 | # insensitive. |
48 | sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) | 57 | sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE) |
49 | status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE) | 58 | status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE) |
50 | cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) | 59 | cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE) |
51 | cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) | 60 | cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE) |
52 | 61 | ||
@@ -54,11 +63,10 @@ def patchreview(path, patches): | |||
54 | 63 | ||
55 | for patch in patches: | 64 | for patch in patches: |
56 | 65 | ||
57 | fullpath = os.path.join(path, patch) | ||
58 | result = Result() | 66 | result = Result() |
59 | results[fullpath] = result | 67 | results[patch] = result |
60 | 68 | ||
61 | content = open(fullpath, encoding='ascii', errors='ignore').read() | 69 | content = open(patch, encoding='ascii', errors='ignore').read() |
62 | 70 | ||
63 | # Find the Signed-off-by tag | 71 | # Find the Signed-off-by tag |
64 | match = sob_re.search(content) | 72 | match = sob_re.search(content) |
@@ -191,29 +199,56 @@ Patches in Pending state: %s""" % (total_patches, | |||
191 | def histogram(results): | 199 | def histogram(results): |
192 | from toolz import recipes, dicttoolz | 200 | from toolz import recipes, dicttoolz |
193 | import math | 201 | import math |
202 | |||
194 | counts = recipes.countby(lambda r: r.upstream_status, results.values()) | 203 | counts = recipes.countby(lambda r: r.upstream_status, results.values()) |
195 | bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) | 204 | bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts) |
196 | for k in bars: | 205 | for k in bars: |
197 | print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) | 206 | print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k])) |
198 | 207 | ||
208 | def find_layers(candidate): | ||
209 | # candidate can either be the path to a layer directly (eg meta-intel), or a | ||
210 | # repository that contains other layers (meta-arm). We can determine which by | ||
211 | # looking for a conf/layer.conf file. If that file exists then it's a layer, | ||
212 | # otherwise its a repository of layers and we can assume they're called | ||
213 | # meta-*. | ||
214 | |||
215 | if (candidate / "conf" / "layer.conf").exists(): | ||
216 | return [candidate.absolute()] | ||
217 | else: | ||
218 | return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))] | ||
219 | |||
220 | # TODO these don't actually handle dynamic-layers/ | ||
221 | |||
222 | def gather_patches(layers): | ||
223 | patches = [] | ||
224 | for directory in layers: | ||
225 | filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split() | ||
226 | patches += [os.path.join(directory, f) for f in filenames] | ||
227 | return patches | ||
228 | |||
229 | def count_recipes(layers): | ||
230 | count = 0 | ||
231 | for directory in layers: | ||
232 | output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True) | ||
233 | count += len(output.splitlines()) | ||
234 | return count | ||
199 | 235 | ||
200 | if __name__ == "__main__": | 236 | if __name__ == "__main__": |
201 | import argparse, subprocess, os | ||
202 | |||
203 | args = argparse.ArgumentParser(description="Patch Review Tool") | 237 | args = argparse.ArgumentParser(description="Patch Review Tool") |
204 | args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") | 238 | args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches") |
205 | args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") | 239 | args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results") |
206 | args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") | 240 | args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram") |
207 | args.add_argument("-j", "--json", help="update JSON") | 241 | args.add_argument("-j", "--json", help="update JSON") |
208 | args.add_argument("directory", help="directory to scan") | 242 | args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)") |
209 | args = args.parse_args() | 243 | args = args.parse_args() |
210 | 244 | ||
211 | patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split() | 245 | layers = find_layers(args.directory) |
212 | results = patchreview(args.directory, patches) | 246 | print(f"Found layers {' '.join((d.name for d in layers))}") |
247 | patches = gather_patches(layers) | ||
248 | results = patchreview(patches) | ||
213 | analyse(results, want_blame=args.blame, verbose=args.verbose) | 249 | analyse(results, want_blame=args.blame, verbose=args.verbose) |
214 | 250 | ||
215 | if args.json: | 251 | if args.json: |
216 | import json, os.path, collections | ||
217 | if os.path.isfile(args.json): | 252 | if os.path.isfile(args.json): |
218 | data = json.load(open(args.json)) | 253 | data = json.load(open(args.json)) |
219 | else: | 254 | else: |
@@ -221,7 +256,11 @@ if __name__ == "__main__": | |||
221 | 256 | ||
222 | row = collections.Counter() | 257 | row = collections.Counter() |
223 | row["total"] = len(results) | 258 | row["total"] = len(results) |
224 | row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip() | 259 | row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip() |
260 | row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip() | ||
261 | row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip() | ||
262 | row['recipe_count'] = count_recipes(layers) | ||
263 | |||
225 | for r in results.values(): | 264 | for r in results.values(): |
226 | if r.upstream_status in status_values: | 265 | if r.upstream_status in status_values: |
227 | row[r.upstream_status] += 1 | 266 | row[r.upstream_status] += 1 |
@@ -231,7 +270,7 @@ if __name__ == "__main__": | |||
231 | row['malformed-sob'] += 1 | 270 | row['malformed-sob'] += 1 |
232 | 271 | ||
233 | data.append(row) | 272 | data.append(row) |
234 | json.dump(data, open(args.json, "w")) | 273 | json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t") |
235 | 274 | ||
236 | if args.histogram: | 275 | if args.histogram: |
237 | print() | 276 | print() |
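The widened Upstream-Status pattern is what lets the new hyphenated "inactive-upstream" value through: the old \w* capture stops at the first hyphen. A standalone sketch demonstrating the difference (the patch header text is illustrative):

    import re

    # New pattern from patchreview.py: [\w-]* also matches hyphenated statuses
    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)",
                           re.IGNORECASE | re.MULTILINE)

    content = "Upstream-Status: Inactive-Upstream\n"
    print(status_re.search(content).group(2))   # -> Inactive-Upstream

    # The previous \w* capture would have stopped at the hyphen:
    old_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)",
                        re.IGNORECASE | re.MULTILINE)
    print(old_re.search(content).group(2))      # -> Inactive
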
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh index 23f238adf6..4012ac7ba7 100755 --- a/scripts/contrib/test_build_time.sh +++ b/scripts/contrib/test_build_time.sh | |||
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then | |||
97 | exit 251 | 97 | exit 251 |
98 | fi | 98 | fi |
99 | 99 | ||
100 | if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then | 100 | if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then |
101 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" | 101 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" |
102 | fi | 102 | fi |
103 | 103 | ||
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh index 478e8b0d03..a2879d2336 100755 --- a/scripts/contrib/test_build_time_worker.sh +++ b/scripts/contrib/test_build_time_worker.sh | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/bin/bash | 1 | #!/bin/bash |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | # This is an example script to be used in conjunction with test_build_time.sh | 7 | # This is an example script to be used in conjunction with test_build_time.sh |
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py index 7bffa78e23..a90b5010bc 100755 --- a/scripts/contrib/verify-homepage.py +++ b/scripts/contrib/verify-homepage.py | |||
@@ -1,5 +1,7 @@ | |||
1 | #!/usr/bin/env python3 | 1 | #!/usr/bin/env python3 |
2 | # | 2 | # |
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
3 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
4 | # | 6 | # |
5 | # This script can be used to verify HOMEPAGE values for all recipes in | 7 | # This script can be used to verify HOMEPAGE values for all recipes in |