| author | Nathan Rossi <nathan@nathanrossi.com> | 2019-09-11 14:13:07 +0000 |
| --- | --- | --- |
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2019-09-16 23:02:43 +0100 |
| commit | 05e583ae84db3d2c0c6331c5e964779478f8fd21 (patch) | |
| tree | 64c188ca061dd3d73cefae6dae1d766cd3256bd4 /scripts | |
| parent | 745e38ff0f95a1e77eacea85184c0c7b20a9feb3 (diff) | |
| download | poky-05e583ae84db3d2c0c6331c5e964779478f8fd21.tar.gz | |
resulttool: Handle multiple series containing ptestresults
Handle multiple result series that contain ptestresults content. The contents
are merged on a per-result basis, with duplicates ignored (a warning message
is printed). The 'ptestresults.sections' collection is also merged on a
per-suite basis.
(From OE-Core rev: 47edd51970ed0c33edbe04fd72abd1cfc6ecd3d1)
Signed-off-by: Nathan Rossi <nathan@nathanrossi.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
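
As context for the behaviour described above, here is a minimal standalone sketch of the merge: results from several series are combined per suite, and a duplicate test result is ignored with a warning. Illustrative only; the function name and data shapes are hypothetical, not the resulttool API.

```python
# Illustrative sketch only -- hypothetical names, not the resulttool API.
def merge_ptest_series(series_list):
    merged = {}  # suite name -> set of test names already recorded
    for series in series_list:
        for suite, tests in series.items():
            seen = merged.setdefault(suite, set())
            for test in tests:
                if test in seen:
                    # duplicate across series: warn and ignore, as the patch does
                    print("Warning duplicate ptest result '{}.{}'".format(suite, test))
                    continue
                seen.add(test)
    return merged

# 'tst-b' appears in both series and is recorded only once.
print(merge_ptest_series([{"glibc": ["tst-a", "tst-b"]},
                          {"glibc": ["tst-b", "tst-c"]}]))
```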
Diffstat (limited to 'scripts')
-rw-r--r--  scripts/lib/resulttool/report.py | 44
1 file changed, 32 insertions(+), 12 deletions(-)
```diff
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index 8b03717d29..883b52517b 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -32,16 +32,22 @@ class ResultsTextReport(object):
             # Ensure tests without any test results still show up on the report
             for suite in result['ptestresult.sections']:
                 if suite not in self.ptests[machine]:
-                    self.ptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+                    self.ptests[machine][suite] = {
+                            'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+                            'failed_testcases': [], "testcases": set(),
+                            }
                 if 'duration' in result['ptestresult.sections'][suite]:
                     self.ptests[machine][suite]['duration'] = result['ptestresult.sections'][suite]['duration']
                 if 'timeout' in result['ptestresult.sections'][suite]:
                     self.ptests[machine][suite]['duration'] += " T"
-            return
+            return True
+
+        # process test result
         try:
             _, suite, test = k.split(".", 2)
         except ValueError:
-            return
+            return True
+
         # Handle 'glib-2.0'
         if 'ptestresult.sections' in result and suite not in result['ptestresult.sections']:
             try:
@@ -50,11 +56,23 @@ class ResultsTextReport(object):
                     suite = suite + "." + suite1
             except ValueError:
                 pass
+
         if suite not in self.ptests[machine]:
-            self.ptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+            self.ptests[machine][suite] = {
+                    'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+                    'failed_testcases': [], "testcases": set(),
+                    }
+
+        # do not process duplicate results
+        if test in self.ptests[machine][suite]["testcases"]:
+            print("Warning duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
+            return False
+
         for tk in self.result_types:
             if status in self.result_types[tk]:
                 self.ptests[machine][suite][tk] += 1
+        self.ptests[machine][suite]["testcases"].add(test)
+        return True
 
     def handle_ltptest_result(self, k, status, result, machine):
         if machine not in self.ltptests:
@@ -124,17 +142,20 @@ class ResultsTextReport(object):
         result = testresult.get('result', [])
         for k in result:
             test_status = result[k].get('status', [])
+            if k.startswith("ptestresult."):
+                if not self.handle_ptest_result(k, test_status, result, machine):
+                    continue
+            elif k.startswith("ltpresult."):
+                self.handle_ltptest_result(k, test_status, result, machine)
+            elif k.startswith("ltpposixresult."):
+                self.handle_ltpposixtest_result(k, test_status, result, machine)
+
+            # process result if it was not skipped by a handler
             for tk in self.result_types:
                 if test_status in self.result_types[tk]:
                     test_count_report[tk] += 1
             if test_status in self.result_types['failed']:
                 test_count_report['failed_testcases'].append(k)
-            if k.startswith("ptestresult."):
-                self.handle_ptest_result(k, test_status, result, machine)
-            if k.startswith("ltpresult."):
-                self.handle_ltptest_result(k, test_status, result, machine)
-            if k.startswith("ltpposixresult."):
-                self.handle_ltpposixtest_result(k, test_status, result, machine)
         return test_count_report
 
     def print_test_report(self, template_file_name, test_count_reports):
@@ -210,8 +231,7 @@ class ResultsTextReport(object):
         # Check to see if there is already results for these kinds of tests for the machine
         for key in result['result'].keys():
             testtype = str(key).split('.')[0]
-            if ((machine in self.ptests and testtype == "ptestresult" and self.ptests[machine]) or
-                (machine in self.ltptests and testtype == "ltpiresult" and self.ltptests[machine]) or
+            if ((machine in self.ltptests and testtype == "ltpiresult" and self.ltptests[machine]) or
                 (machine in self.ltpposixtests and testtype == "ltpposixresult" and self.ltpposixtests[machine])):
                 print("Already have test results for %s on %s, skipping %s" %(str(key).split('.')[0], machine, resultid))
                 skip = True
```
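
The hunks above hinge on a small return-value contract: handle_ptest_result now returns False for a duplicate, and the caller uses that to skip the generic per-status counting, so a duplicate result is never counted twice. A minimal sketch of that pattern with hypothetical names (not report.py itself):

```python
# Hypothetical sketch of the handler/counter contract used above.
def handle(key, seen):
    """Return False to tell the caller to skip generic counting for key."""
    if key in seen:
        print("Warning duplicate result '{}'".format(key))
        return False
    seen.add(key)
    return True

def count(keys):
    seen, counted = set(), 0
    for k in keys:
        if not handle(k, seen):
            continue  # duplicate was skipped by the handler: do not count it
        counted += 1
    return counted

print(count(["ptestresult.glibc.tst-a", "ptestresult.glibc.tst-a"]))  # prints 1
```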