author    Markus Lehtonen <markus.lehtonen@linux.intel.com>  2016-10-28 10:19:45 +0300
committer Richard Purdie <richard.purdie@linuxfoundation.org>  2017-01-23 12:05:21 +0000
commit    4a26ceaecfcb17cbabf4cf9e1c995f0eafc2e41c (patch)
tree      1156fcd37573892c79166df532a9687b8281dfe4
parent    07c245792e4b3306c29a017dbbed63200190bea5 (diff)
download  poky-4a26ceaecfcb17cbabf4cf9e1c995f0eafc2e41c.tar.gz
oeqa.buildperf: extend xml format to contain measurement data
Make the xml report format slightly non-standard by incorporating
measurement data into it.

[YOCTO #10590]

(From OE-Core rev: b7164d30fb125ff0c85a2ea508b0f1801aa57f66)

Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
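For illustration only (not part of the commit), a minimal standalone sketch of how one
measurement ends up in the extended report, using xml.etree.ElementTree the same way the
patch below does. It assumes BuildPerfTestCase.SYSRES is the literal element name 'sysres';
the test name, legend, iostat/rusage keys and all numbers are invented sample data.

    # Standalone sketch, assuming BuildPerfTestCase.SYSRES maps to the element
    # name 'sysres'. All names and numbers below are invented sample data.
    import xml.etree.ElementTree as ET
    from datetime import datetime, timedelta

    testcase = ET.Element('testcase',
                          classname='oeqa.buildperf.test_basic.BuildTest',
                          name='test_build_time')
    vals = {'start_time': datetime(2016, 10, 28, 10, 19, 45),
            'elapsed_time': timedelta(minutes=42, seconds=7),
            'iostat': {'read_bytes': 123456, 'write_bytes': 654321},
            'rusage': {'ru_utime': 2500.0, 'ru_stime': 310.0}}

    # Same serialization pattern as the patch: a measurement element named after
    # the measurement type, with name/legend attributes and per-type children.
    measurement = ET.SubElement(testcase, 'sysres',
                                name='build_time',
                                legend='bitbake core-image-sato')
    ET.SubElement(measurement, 'time',
                  timestamp=vals['start_time'].isoformat()).text = \
        str(vals['elapsed_time'].total_seconds())
    ET.SubElement(measurement, 'iostat',
                  attrib={k: str(v) for k, v in vals['iostat'].items()})
    ET.SubElement(measurement, 'rusage',
                  attrib={k: str(v) for k, v in vals['rusage'].items()})

    # Prints the <testcase> element with a nested <sysres> measurement
    # (a single unindented line; the real report is pretty-printed via minidom).
    print(ET.tostring(testcase, encoding='unicode'))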
-rw-r--r--  meta/lib/oeqa/buildperf/base.py  23
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index de0ee40a23..efbe20c500 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -269,6 +269,7 @@ class BuildPerfTestResult(unittest.TextTestResult):
 
         test_cnt = 0
         for status, (test, reason) in self.all_results():
+            test_cnt += 1
             testcase = ET.SubElement(suite, 'testcase')
             testcase.set('classname', test.__module__ + '.' + test.__class__.__name__)
             testcase.set('name', test.name)
@@ -287,7 +288,27 @@ class BuildPerfTestResult(unittest.TextTestResult):
                 result.text = reason
             elif status not in ('SUCCESS', 'UNEXPECTED_SUCCESS'):
                 raise TypeError("BUG: invalid test status '%s'" % status)
-            test_cnt += 1
+
+            for data in test.measurements:
+                measurement = ET.SubElement(testcase, data['type'])
+                measurement.set('name', data['name'])
+                measurement.set('legend', data['legend'])
+                vals = data['values']
+                if data['type'] == BuildPerfTestCase.SYSRES:
+                    ET.SubElement(measurement, 'time',
+                                  timestamp=vals['start_time'].isoformat()).text = \
+                        str(vals['elapsed_time'].total_seconds())
+                    if 'buildstats_file' in vals:
+                        ET.SubElement(measurement, 'buildstats_file').text = vals['buildstats_file']
+                    attrib = dict((k, str(v)) for k, v in vals['iostat'].items())
+                    ET.SubElement(measurement, 'iostat', attrib=attrib)
+                    attrib = dict((k, str(v)) for k, v in vals['rusage'].items())
+                    ET.SubElement(measurement, 'rusage', attrib=attrib)
+                elif data['type'] == BuildPerfTestCase.DISKUSAGE:
+                    ET.SubElement(measurement, 'size').text = str(vals['size'])
+                else:
+                    raise TypeError('BUG: unsupported measurement type')
+
         suite.set('tests', str(test_cnt))
 
         # Use minidom for pretty-printing
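On the consumer side, a report extended this way can be read back with plain ElementTree.
A rough sketch, not part of the patch: the 'results.xml' file name and the
'sysres'/'diskusage' element names are assumptions based on the serialization code above.

    # Rough consumer-side sketch, not part of the patch. 'results.xml' and the
    # 'sysres'/'diskusage' element names are assumptions based on the diff above.
    import xml.etree.ElementTree as ET

    root = ET.parse('results.xml').getroot()
    for testcase in root.iter('testcase'):
        tc_name = testcase.get('name')
        # System-resource measurements: elapsed time plus iostat/rusage attributes
        for sysres in testcase.findall('sysres'):
            time_elem = sysres.find('time')
            print('%s/%s: %s s (started %s)' %
                  (tc_name, sysres.get('name'),
                   time_elem.text, time_elem.get('timestamp')))
        # Disk usage measurements: a single <size> child
        for du in testcase.findall('diskusage'):
            print('%s/%s: size %s' %
                  (tc_name, du.get('name'), du.find('size').text))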