diff options
author | Markus Lehtonen <markus.lehtonen@linux.intel.com> | 2016-10-27 17:52:37 +0300 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2017-01-23 12:05:21 +0000 |
commit | 07c245792e4b3306c29a017dbbed63200190bea5 (patch) | |
tree | 0277a8072b4aa2237b600159f9ee9c1453226145 /meta | |
parent | 96fcb0f7b4a8a59a1620e48a65a9cda8cf03d71f (diff) | |
download | poky-07c245792e4b3306c29a017dbbed63200190bea5.tar.gz |
oe-build-perf-test: enable xml reporting
Add --xml command line option to oe-build-perf-test script for producing
a test report in JUnit XML format instead of JSON.
[YOCTO #10590]
(From OE-Core rev: 21ae1c491b93675254b7733640662b566ed76f98)
Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta')
-rw-r--r-- | meta/lib/oeqa/buildperf/base.py | 43 |
1 file changed, 42 insertions, 1 deletion
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py index 668e822018..de0ee40a23 100644 --- a/meta/lib/oeqa/buildperf/base.py +++ b/meta/lib/oeqa/buildperf/base.py | |||
@@ -21,10 +21,12 @@ import socket | |||
21 | import time | 21 | import time |
22 | import traceback | 22 | import traceback |
23 | import unittest | 23 | import unittest |
24 | import xml.etree.ElementTree as ET | ||
24 | from datetime import datetime, timedelta | 25 | from datetime import datetime, timedelta |
25 | from functools import partial | 26 | from functools import partial |
26 | from multiprocessing import Process | 27 | from multiprocessing import Process |
27 | from multiprocessing import SimpleQueue | 28 | from multiprocessing import SimpleQueue |
29 | from xml.dom import minidom | ||
28 | 30 | ||
29 | import oe.path | 31 | import oe.path |
30 | from oeqa.utils.commands import CommandError, runCmd, get_bb_vars | 32 | from oeqa.utils.commands import CommandError, runCmd, get_bb_vars |
@@ -169,7 +171,6 @@ class BuildPerfTestResult(unittest.TextTestResult): | |||
169 | def stopTestRun(self): | 171 | def stopTestRun(self): |
170 | """Post-run hook""" | 172 | """Post-run hook""" |
171 | self.elapsed_time = datetime.utcnow() - self.start_time | 173 | self.elapsed_time = datetime.utcnow() - self.start_time |
172 | self.write_results_json() | ||
173 | 174 | ||
174 | def all_results(self): | 175 | def all_results(self): |
175 | result_map = {'SUCCESS': self.successes, | 176 | result_map = {'SUCCESS': self.successes, |
@@ -254,6 +255,46 @@ class BuildPerfTestResult(unittest.TextTestResult): | |||
254 | json.dump(results, fobj, indent=4, sort_keys=True, | 255 | json.dump(results, fobj, indent=4, sort_keys=True, |
255 | cls=ResultsJsonEncoder) | 256 | cls=ResultsJsonEncoder) |
256 | 257 | ||
258 | def write_results_xml(self): | ||
259 | """Write test results into a JUnit XML file""" | ||
260 | top = ET.Element('testsuites') | ||
261 | suite = ET.SubElement(top, 'testsuite') | ||
262 | suite.set('name', 'oeqa.buildperf') | ||
263 | suite.set('timestamp', self.start_time.isoformat()) | ||
264 | suite.set('time', str(self.elapsed_time.total_seconds())) | ||
265 | suite.set('hostname', self.hostname) | ||
266 | suite.set('failures', str(len(self.failures) + len(self.expectedFailures))) | ||
267 | suite.set('errors', str(len(self.errors))) | ||
268 | suite.set('skipped', str(len(self.skipped))) | ||
269 | |||
270 | test_cnt = 0 | ||
271 | for status, (test, reason) in self.all_results(): | ||
272 | testcase = ET.SubElement(suite, 'testcase') | ||
273 | testcase.set('classname', test.__module__ + '.' + test.__class__.__name__) | ||
274 | testcase.set('name', test.name) | ||
275 | testcase.set('timestamp', test.start_time.isoformat()) | ||
276 | testcase.set('time', str(test.elapsed_time.total_seconds())) | ||
277 | if status in ('ERROR', 'FAILURE', 'EXP_FAILURE'): | ||
278 | if status in ('FAILURE', 'EXP_FAILURE'): | ||
279 | result = ET.SubElement(testcase, 'failure') | ||
280 | else: | ||
281 | result = ET.SubElement(testcase, 'error') | ||
282 | result.set('message', str(test.err[1])) | ||
283 | result.set('type', test.err[0].__name__) | ||
284 | result.text = reason | ||
285 | elif status == 'SKIPPED': | ||
286 | result = ET.SubElement(testcase, 'skipped') | ||
287 | result.text = reason | ||
288 | elif status not in ('SUCCESS', 'UNEXPECTED_SUCCESS'): | ||
289 | raise TypeError("BUG: invalid test status '%s'" % status) | ||
290 | test_cnt += 1 | ||
291 | suite.set('tests', str(test_cnt)) | ||
292 | |||
293 | # Use minidom for pretty-printing | ||
294 | dom_doc = minidom.parseString(ET.tostring(top, 'utf-8')) | ||
295 | with open(os.path.join(self.out_dir, 'results.xml'), 'w') as fobj: | ||
296 | dom_doc.writexml(fobj, addindent=' ', newl='\n', encoding='utf-8') | ||
297 | return | ||
257 | 298 | ||
258 | def git_commit_results(self, repo_path, branch=None, tag=None): | 299 | def git_commit_results(self, repo_path, branch=None, tag=None): |
259 | """Commit results into a Git repository""" | 300 | """Commit results into a Git repository""" |