Diffstat (limited to 'scripts/lib')
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py                 63
-rw-r--r--  scripts/lib/devtool/standard.py                 3
-rwxr-xr-x  scripts/lib/resulttool/manualexecution.py       2
-rw-r--r--  scripts/lib/resulttool/report.py                2
-rw-r--r--  scripts/lib/resulttool/resultutils.py          76
-rw-r--r--  scripts/lib/resulttool/store.py                26
-rw-r--r--  scripts/lib/wic/plugins/source/bootimg-efi.py   2
7 files changed, 110 insertions, 64 deletions
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
index 65873b088d..0b50165a12 100755
--- a/scripts/lib/devtool/ide_sdk.py
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -288,6 +288,7 @@ class RecipeModified:
         self.bblayers = None
         self.bpn = None
         self.d = None
+        self.debug_build = None
         self.fakerootcmd = None
         self.fakerootenv = None
         self.libdir = None
@@ -348,6 +349,7 @@ class RecipeModified:
         self.bpn = recipe_d.getVar('BPN')
         self.cxx = recipe_d.getVar('CXX')
         self.d = recipe_d.getVar('D')
+        self.debug_build = recipe_d.getVar('DEBUG_BUILD')
         self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
         self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
         self.libdir = recipe_d.getVar('libdir')
@@ -389,17 +391,6 @@ class RecipeModified:
         self.recipe_id = self.bpn + "-" + self.package_arch
         self.recipe_id_pretty = self.bpn + ": " + self.package_arch
 
-    def append_to_bbappend(self, append_text):
-        with open(self.bbappend, 'a') as bbap:
-            bbap.write(append_text)
-
-    def remove_from_bbappend(self, append_text):
-        with open(self.bbappend, 'r') as bbap:
-            text = bbap.read()
-        new_text = text.replace(append_text, '')
-        with open(self.bbappend, 'w') as bbap:
-            bbap.write(new_text)
-
     @staticmethod
     def is_valid_shell_variable(var):
         """Skip strange shell variables like systemd
@@ -412,34 +403,6 @@ class RecipeModified:
             return True
         return False
 
-    def debug_build_config(self, args):
-        """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
-        if self.build_tool is BuildTool.CMAKE:
-            append_text = os.linesep + \
-                'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
-            if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
-                self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
-                    "type": "STRING",
-                    "value": "Debug",
-                }
-                self.append_to_bbappend(append_text)
-            elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
-                del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
-                self.remove_from_bbappend(append_text)
-        elif self.build_tool is BuildTool.MESON:
-            append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
-            if args.debug_build_config and self.meson_buildtype != "debug":
-                self.mesonopts.replace(
-                    '--buildtype ' + self.meson_buildtype, '--buildtype debug')
-                self.append_to_bbappend(append_text)
-            elif self.meson_buildtype == "debug":
-                self.mesonopts.replace(
-                    '--buildtype debug', '--buildtype plain')
-                self.remove_from_bbappend(append_text)
-        elif args.debug_build_config:
-            logger.warn(
-                "--debug-build-config is not implemented for this build tool yet.")
-
     def solib_search_path(self, image):
         """Search for debug symbols in the rootfs and rootfs-dbg
 
@@ -493,7 +456,7 @@ class RecipeModified:
 
         vars = (key for key in d.keys() if not key.startswith(
             "__") and not d.getVarFlag(key, "func", False))
-        for var in vars:
+        for var in sorted(vars):
             func = d.getVarFlag(var, "func", False)
             if d.getVarFlag(var, 'python', False) and func:
                 continue
@@ -545,7 +508,7 @@ class RecipeModified:
         cache_vars = {}
         oecmake_args = d.getVar('OECMAKE_ARGS').split()
         extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
-        for param in oecmake_args + extra_oecmake:
+        for param in sorted(oecmake_args + extra_oecmake):
             d_pref = "-D"
             if param.startswith(d_pref):
                 param = param[len(d_pref):]
@@ -988,6 +951,13 @@ def ide_setup(args, config, basepath, workspace):
             recipe_modified.gen_meson_wrapper()
         ide.setup_modified_recipe(
             args, recipe_image, recipe_modified)
+
+        if recipe_modified.debug_build != '1':
+            logger.warn(
+                'Recipe %s is compiled with release build configuration. '
+                'You might want to add DEBUG_BUILD = "1" to %s. '
+                'Note that devtool modify --debug-build can do this automatically.',
+                recipe_modified.name, recipe_modified.bbappend)
     else:
         raise DevtoolError("Must not end up here.")
 
@@ -995,6 +965,15 @@ def ide_setup(args, config, basepath, workspace):
 def register_commands(subparsers, context):
     """Register devtool subcommands from this plugin"""
 
+    # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
+    # configuration is generated. In the case of the eSDK, the bootstrapping is performed
+    # during the installation of the eSDK installer. Running the ide-sdk plugin from an
+    # eSDK installer-based setup would require skipping the bootstrapping and probably
+    # taking some other differences into account when generating the IDE configurations.
+    # This would be possible. But it is not implemented.
+    if context.fixed_setup:
+        return
+
     global ide_plugins
 
     # Search for IDE plugins in all sub-folders named ide_plugins where devtool seraches for plugins.
@@ -1065,6 +1044,4 @@ def register_commands(subparsers, context):
         '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
     parser_ide_sdk.add_argument(
         '--no-check-space', help='Do not check for available space before deploying', action='store_true')
-    parser_ide_sdk.add_argument(
-        '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
     parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 05161942b7..908869cc4f 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -1031,6 +1031,8 @@ def modify(args, config, basepath, workspace):
                     if branch == args.branch:
                         continue
                     f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
+            if args.debug_build:
+                f.write('\nDEBUG_BUILD = "1"\n')
 
         update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
 
@@ -2396,6 +2398,7 @@ def register_commands(subparsers, context):
     parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
     parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
     parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
+    parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe')
     parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
 
     parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
diff --git a/scripts/lib/resulttool/manualexecution.py b/scripts/lib/resulttool/manualexecution.py
index ecb27c5933..ae0861ac6b 100755
--- a/scripts/lib/resulttool/manualexecution.py
+++ b/scripts/lib/resulttool/manualexecution.py
@@ -22,7 +22,7 @@ def load_json_file(f):
 def write_json_file(f, json_data):
     os.makedirs(os.path.dirname(f), exist_ok=True)
     with open(f, 'w') as filedata:
-        filedata.write(json.dumps(json_data, sort_keys=True, indent=4))
+        filedata.write(json.dumps(json_data, sort_keys=True, indent=1))
 
 class ManualTestRunner(object):
 
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index a349510ab8..1c100b00ab 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -256,7 +256,7 @@ class ResultsTextReport(object):
             if selected_test_case_only:
                 print_selected_testcase_result(raw_results, selected_test_case_only)
             else:
-                print(json.dumps(raw_results, sort_keys=True, indent=4))
+                print(json.dumps(raw_results, sort_keys=True, indent=1))
         else:
             print('Could not find raw test result for %s' % raw_test)
     return 0
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
index c5521d81bd..b8fc79a6ac 100644
--- a/scripts/lib/resulttool/resultutils.py
+++ b/scripts/lib/resulttool/resultutils.py
@@ -14,8 +14,11 @@ import scriptpath
 import copy
 import urllib.request
 import posixpath
+import logging
 scriptpath.add_oe_lib_path()
 
+logger = logging.getLogger('resulttool')
+
 flatten_map = {
     "oeselftest": [],
     "runtime": [],
@@ -31,13 +34,19 @@ regression_map = {
     "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
 }
 store_map = {
-    "oeselftest": ['TEST_TYPE'],
+    "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'],
     "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
     "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
     "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
     "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
 }
 
+rawlog_sections = {
+    "ptestresult.rawlogs": "ptest",
+    "ltpresult.rawlogs": "ltp",
+    "ltpposixresult.rawlogs": "ltpposix"
+}
+
 def is_url(p):
     """
     Helper for determining if the given path is a URL
@@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid):
                 newresults[r][i] = results[r][i]
     return newresults
 
-def strip_ptestresults(results):
+def strip_logs(results):
     newresults = copy.deepcopy(results)
-    #for a in newresults2:
-    #  newresults = newresults2[a]
     for res in newresults:
         if 'result' not in newresults[res]:
             continue
-        if 'ptestresult.rawlogs' in newresults[res]['result']:
-            del newresults[res]['result']['ptestresult.rawlogs']
+        for logtype in rawlog_sections:
+            if logtype in newresults[res]['result']:
+                del newresults[res]['result'][logtype]
         if 'ptestresult.sections' in newresults[res]['result']:
             for i in newresults[res]['result']['ptestresult.sections']:
                 if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
                     del newresults[res]['result']['ptestresult.sections'][i]['log']
     return newresults
 
+# For timing numbers, crazy amounts of precision don't make sense and just confuse
+# the logs. For numbers over 1, trim to 3 decimal places, for numbers less than 1,
+# trim to 4 significant digits
+def trim_durations(results):
+    for res in results:
+        if 'result' not in results[res]:
+            continue
+        for entry in results[res]['result']:
+            if 'duration' in results[res]['result'][entry]:
+                duration = results[res]['result'][entry]['duration']
+                if duration > 1:
+                    results[res]['result'][entry]['duration'] = float("%.3f" % duration)
+                elif duration < 1:
+                    results[res]['result'][entry]['duration'] = float("%.4g" % duration)
+    return results
+
+def handle_cleanups(results):
+    # Remove pointless path duplication from old format reproducibility results
+    for res2 in results:
+        try:
+            section = results[res2]['result']['reproducible']['files']
+            for pkgtype in section:
+                for filelist in section[pkgtype].copy():
+                    if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict:
+                        newlist = []
+                        for entry in section[pkgtype][filelist]:
+                            newlist.append(entry["reference"].split("/./")[1])
+                        section[pkgtype][filelist] = newlist
+
+        except KeyError:
+            pass
+        # Remove pointless duplicate rawlogs data
+        try:
+            del results[res2]['result']['reproducible.rawlogs']
+        except KeyError:
+            pass
+
 def decode_log(logdata):
     if isinstance(logdata, str):
         return logdata
@@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results):
         return None
     return decode_log(results[sectname]['log'])
 
-def ptestresult_get_rawlogs(results):
-    return generic_get_rawlogs('ptestresult.rawlogs', results)
-
 def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
     for res in results:
         if res:
@@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
             os.makedirs(os.path.dirname(dst), exist_ok=True)
             resultsout = results[res]
             if not ptestjson:
-                resultsout = strip_ptestresults(results[res])
+                resultsout = strip_logs(results[res])
+                trim_durations(resultsout)
+                handle_cleanups(resultsout)
             with open(dst, 'w') as f:
-                f.write(json.dumps(resultsout, sort_keys=True, indent=4))
+                f.write(json.dumps(resultsout, sort_keys=True, indent=1))
             for res2 in results[res]:
                 if ptestlogs and 'result' in results[res][res2]:
                     seriesresults = results[res][res2]['result']
-                    rawlogs = ptestresult_get_rawlogs(seriesresults)
-                    if rawlogs is not None:
-                        with open(dst.replace(fn, "ptest-raw.log"), "w+") as f:
-                            f.write(rawlogs)
+                    for logtype in rawlog_sections:
+                        logdata = generic_get_rawlogs(logtype, seriesresults)
+                        if logdata is not None:
+                            logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log")
+                            with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f:
+                                f.write(logdata)
                     if 'ptestresult.sections' in seriesresults:
                         for i in seriesresults['ptestresult.sections']:
                             sectionlog = ptestresult_get_log(seriesresults, i)
diff --git a/scripts/lib/resulttool/store.py b/scripts/lib/resulttool/store.py
index e0951f0a8f..578910d234 100644
--- a/scripts/lib/resulttool/store.py
+++ b/scripts/lib/resulttool/store.py
@@ -65,18 +65,34 @@ def store(args, logger):
 
         for r in revisions:
             results = revisions[r]
+            if args.revision and r[0] != args.revision:
+                logger.info('skipping %s as non-matching' % r[0])
+                continue
             keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
-            subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"])
+            subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"])
             resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
 
         logger.info('Storing test result into git repository %s' % args.git_dir)
 
-        gitarchive.gitarchive(tempdir, args.git_dir, False, False,
+        excludes = []
+        if args.logfile_archive:
+            excludes = ['*.log', "*.log.zst"]
+
+        tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False,
                               "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
                               False, "{branch}/{commit_count}-g{commit}/{tag_number}",
                               'Test run #{tag_number} of {branch}:{commit}', '',
-                              [], [], False, keywords, logger)
+                              excludes, [], False, keywords, logger)
 
+        if args.logfile_archive:
+            logdir = args.logfile_archive + "/" + tagname
+            shutil.copytree(tempdir, logdir)
+            for root, dirs, files in os.walk(logdir):
+                for name in files:
+                    if not name.endswith(".log"):
+                        continue
+                    f = os.path.join(root, name)
+                    subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True)
     finally:
         subprocess.check_call(["rm", "-rf", tempdir])
 
@@ -102,3 +118,7 @@ def register_commands(subparsers):
                               help='add executed-by configuration to each result file')
     parser_build.add_argument('-t', '--extra-test-env', default='',
                               help='add extra test environment data to each result file configuration')
+    parser_build.add_argument('-r', '--revision', default='',
+                              help='only store data for the specified revision')
+    parser_build.add_argument('-l', '--logfile-archive', default='',
+                              help='directory to separately archive log files along with a copy of the results')
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py
index 7cc5131541..37d07093f5 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -245,7 +245,7 @@ class BootimgEFIPlugin(SourcePlugin):
 
         # list of tuples (src_name, dst_name)
         deploy_files = []
-        for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
+        for src_entry in re.findall(r'[\w;\-\.\+/\*]+', boot_files):
             if ';' in src_entry:
                 dst_entry = tuple(src_entry.split(';'))
                 if not dst_entry[0] or not dst_entry[1]: