 -rw-r--r--  meta/lib/oeqa/buildperf/base.py | 43
 -rwxr-xr-x  scripts/oe-build-perf-test      |  6
 2 files changed, 48 insertions(+), 1 deletion(-)
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index 668e822018..de0ee40a23 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -21,10 +21,12 @@ import socket
 import time
 import traceback
 import unittest
+import xml.etree.ElementTree as ET
 from datetime import datetime, timedelta
 from functools import partial
 from multiprocessing import Process
 from multiprocessing import SimpleQueue
+from xml.dom import minidom
 
 import oe.path
 from oeqa.utils.commands import CommandError, runCmd, get_bb_vars
@@ -169,7 +171,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
     def stopTestRun(self):
         """Pre-run hook"""
         self.elapsed_time = datetime.utcnow() - self.start_time
-        self.write_results_json()
 
     def all_results(self):
         result_map = {'SUCCESS': self.successes,
@@ -254,6 +255,46 @@ class BuildPerfTestResult(unittest.TextTestResult):
             json.dump(results, fobj, indent=4, sort_keys=True,
                       cls=ResultsJsonEncoder)
 
+    def write_results_xml(self):
+        """Write test results into a JUnit XML file"""
+        top = ET.Element('testsuites')
+        suite = ET.SubElement(top, 'testsuite')
+        suite.set('name', 'oeqa.buildperf')
+        suite.set('timestamp', self.start_time.isoformat())
+        suite.set('time', str(self.elapsed_time.total_seconds()))
+        suite.set('hostname', self.hostname)
+        suite.set('failures', str(len(self.failures) + len(self.expectedFailures)))
+        suite.set('errors', str(len(self.errors)))
+        suite.set('skipped', str(len(self.skipped)))
+
+        test_cnt = 0
+        for status, (test, reason) in self.all_results():
+            testcase = ET.SubElement(suite, 'testcase')
+            testcase.set('classname', test.__module__ + '.' + test.__class__.__name__)
+            testcase.set('name', test.name)
+            testcase.set('timestamp', test.start_time.isoformat())
+            testcase.set('time', str(test.elapsed_time.total_seconds()))
+            if status in ('ERROR', 'FAILURE', 'EXP_FAILURE'):
+                if status in ('FAILURE', 'EXP_FAILURE'):
+                    result = ET.SubElement(testcase, 'failure')
+                else:
+                    result = ET.SubElement(testcase, 'error')
+                result.set('message', str(test.err[1]))
+                result.set('type', test.err[0].__name__)
+                result.text = reason
+            elif status == 'SKIPPED':
+                result = ET.SubElement(testcase, 'skipped')
+                result.text = reason
+            elif status not in ('SUCCESS', 'UNEXPECTED_SUCCESS'):
+                raise TypeError("BUG: invalid test status '%s'" % status)
+            test_cnt += 1
+        suite.set('tests', str(test_cnt))
+
+        # Use minidom for pretty-printing
+        dom_doc = minidom.parseString(ET.tostring(top, 'utf-8'))
+        with open(os.path.join(self.out_dir, 'results.xml'), 'w') as fobj:
+            dom_doc.writexml(fobj, addindent='  ', newl='\n', encoding='utf-8')
+        return
 
     def git_commit_results(self, repo_path, branch=None, tag=None):
         """Commit results into a Git repository"""
diff --git a/scripts/oe-build-perf-test b/scripts/oe-build-perf-test
index 638e195efb..4ec9f1403e 100755
--- a/scripts/oe-build-perf-test
+++ b/scripts/oe-build-perf-test
@@ -131,6 +131,8 @@ def parse_args(argv):
     parser.add_argument('-o', '--out-dir', default='results-{date}',
                         type=os.path.abspath,
                         help="Output directory for test results")
+    parser.add_argument('-x', '--xml', action='store_true',
+                        help='Enable JUnit xml output')
     parser.add_argument('--log-file',
                         default='{out_dir}/oe-build-perf-test.log',
                         help="Log file of this script")
@@ -194,6 +196,10 @@ def main(argv=None):
     # Restore logger output to stderr
     log.handlers[0].setLevel(log.level)
 
+    if args.xml:
+        result.write_results_xml()
+    else:
+        result.write_results_json()
     if args.globalres_file:
         result.update_globalres_file(args.globalres_file)
     if args.commit_results:
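Usage sketch (not part of the patch), assuming oe-build-perf-test is on PATH in a configured build environment and --out-dir is pointed at an explicit directory; it runs the suite with the new --xml switch and then reads the suite-level counters back from the results.xml that write_results_xml() writes into the output directory.

import subprocess
import xml.etree.ElementTree as ET

# Run the build performance suite with JUnit XML output enabled.
subprocess.check_call(['oe-build-perf-test', '--xml', '--out-dir', 'perf-results'])

# Read back the suite-level counters, e.g. as a simple CI gate.
suite = ET.parse('perf-results/results.xml').getroot().find('testsuite')
print('%s: %s tests, %s failures, %s errors, %s skipped' % (
    suite.get('name'), suite.get('tests'), suite.get('failures'),
    suite.get('errors'), suite.get('skipped')))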