Diffstat (limited to 'meta/lib/oeqa/buildperf/base.py')
-rw-r--r--  meta/lib/oeqa/buildperf/base.py  42
1 file changed, 24 insertions, 18 deletions
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index dd473a0bdc..ffe42dc87b 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -16,6 +16,7 @@ import os
 import re
 import resource
 import socket
+import shutil
 import time
 import unittest
 import xml.etree.ElementTree as ET
@@ -127,7 +128,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
     def startTest(self, test):
         """Pre-test hook"""
         test.base_dir = self.out_dir
-        os.mkdir(test.out_dir)
         log.info("Executing test %s: %s", test.name, test.shortDescription())
         self.stream.write(datetime.now().strftime("[%Y-%m-%d %H:%M:%S] "))
         super(BuildPerfTestResult, self).startTest(test)
@@ -150,6 +150,16 @@ class BuildPerfTestResult(unittest.TextTestResult):
         return sorted(compound, key=lambda info: info[1].start_time)


+    def write_buildstats_json(self):
+        """Write buildstats file"""
+        buildstats = OrderedDict()
+        for _, test, _ in self.all_results():
+            for key, val in test.buildstats.items():
+                buildstats[test.name + '.' + key] = val
+        with open(os.path.join(self.out_dir, 'buildstats.json'), 'w') as fobj:
+            json.dump(buildstats, fobj, cls=ResultsJsonEncoder)
+
+
     def write_results_json(self):
         """Write test results into a json-formatted file"""
         results = OrderedDict([('tester_host', self.hostname),
@@ -221,8 +231,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
                 ET.SubElement(measurement, 'time',
                               timestamp=vals['start_time'].isoformat()).text = \
                     str(vals['elapsed_time'].total_seconds())
-                if 'buildstats_file' in vals:
-                    ET.SubElement(measurement, 'buildstats_file').text = vals['buildstats_file']
                 attrib = dict((k, str(v)) for k, v in vals['iostat'].items())
                 ET.SubElement(measurement, 'iostat', attrib=attrib)
                 attrib = dict((k, str(v)) for k, v in vals['rusage'].items())
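After the two removed lines, a measurement element in results.xml no longer points at a per-measurement buildstats file; buildstats are collected into the single buildstats.json instead. A standalone sketch of the remaining element shape, with invented sample values:

# Invented values; mirrors the element structure the patch keeps.
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta

measurement = ET.Element('sysres', name='build', legend='Build time')
start, elapsed = datetime.now(), timedelta(minutes=42)
ET.SubElement(measurement, 'time',
              timestamp=start.isoformat()).text = str(elapsed.total_seconds())
ET.SubElement(measurement, 'iostat', attrib={'read_bytes': '1024'})
ET.SubElement(measurement, 'rusage', attrib={'ru_utime': '12.3'})
print(ET.tostring(measurement, 'unicode'))  # no <buildstats_file> child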
@@ -238,7 +246,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
         dom_doc = minidom.parseString(ET.tostring(top, 'utf-8'))
         with open(os.path.join(self.out_dir, 'results.xml'), 'w') as fobj:
             dom_doc.writexml(fobj, addindent='  ', newl='\n', encoding='utf-8')
-        return


 class BuildPerfTestCase(unittest.TestCase):
@@ -254,6 +261,7 @@ class BuildPerfTestCase(unittest.TestCase):
         self.start_time = None
         self.elapsed_time = None
         self.measurements = OrderedDict()
+        self.buildstats = OrderedDict()
         # self.err is supposed to be a tuple from sys.exc_info()
         self.err = None
         self.bb_vars = get_bb_vars()
@@ -263,17 +271,24 @@ class BuildPerfTestCase(unittest.TestCase):
         self.sizes = []

     @property
-    def out_dir(self):
-        return os.path.join(self.base_dir, self.name)
+    def tmp_dir(self):
+        return os.path.join(self.base_dir, self.name + '.tmp')

     def shortDescription(self):
         return super(BuildPerfTestCase, self).shortDescription() or ""

     def setUp(self):
         """Set-up fixture for each test"""
+        if not os.path.isdir(self.tmp_dir):
+            os.mkdir(self.tmp_dir)
         if self.build_target:
             self.run_cmd(['bitbake', self.build_target, '-c', 'fetchall'])

+    def tearDown(self):
+        """Tear-down fixture for each test"""
+        if os.path.isdir(self.tmp_dir):
+            shutil.rmtree(self.tmp_dir)
+
     def run(self, *args, **kwargs):
         """Run test"""
         self.start_time = datetime.now()
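The new tmp_dir property plus the setUp()/tearDown() pair replaces the out_dir that startTest() used to create: each test now gets a scratch directory that is removed when the test finishes, pass or fail. A self-contained sketch of the same fixture pattern (class name and paths are illustrative, not from the patch):

import os
import shutil
import unittest

class ScratchDirCase(unittest.TestCase):
    base_dir = '/tmp/buildperf-demo'  # stand-in for BuildPerfTestCase.base_dir

    @property
    def tmp_dir(self):
        return os.path.join(self.base_dir, self._testMethodName + '.tmp')

    def setUp(self):
        # Created lazily, like BuildPerfTestCase.setUp()
        if not os.path.isdir(self.tmp_dir):
            os.makedirs(self.tmp_dir)

    def tearDown(self):
        # Removed unconditionally, so no intermediate files survive the test
        if os.path.isdir(self.tmp_dir):
            shutil.rmtree(self.tmp_dir)

    def test_scratch(self):
        with open(os.path.join(self.tmp_dir, 'scratch.txt'), 'w') as fobj:
            fobj.write('intermediate data\n')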
@@ -349,9 +364,7 @@ class BuildPerfTestCase(unittest.TestCase):
                                             ('rusage', data['rusage']),
                                             ('iostat', data['iostat'])])
         if save_bs:
-            bs_file = self.save_buildstats(legend)
-            measurement['values']['buildstats_file'] = \
-                os.path.relpath(bs_file, self.base_dir)
+            self.save_buildstats(name)

         self._append_measurement(measurement)
@@ -379,7 +392,7 @@ class BuildPerfTestCase(unittest.TestCase):
         # Append to 'sizes' array for globalres log
         self.sizes.append(str(size))

-    def save_buildstats(self, label=None):
+    def save_buildstats(self, measurement_name):
         """Save buildstats"""
         def split_nevr(nevr):
             """Split name and version information from recipe "nevr" string"""
@@ -451,14 +464,7 @@ class BuildPerfTestCase(unittest.TestCase):
                                                              task))
             buildstats.append(recipe_bs)

-        # Write buildstats into json file
-        postfix = '.' + str_to_fn(label) if label else ''
-        postfix += '.json'
-        outfile = os.path.join(self.out_dir, 'buildstats' + postfix)
-        with open(outfile, 'w') as fobj:
-            json.dump(buildstats, fobj, indent=4, sort_keys=True,
-                      cls=ResultsJsonEncoder)
-        return outfile
+        self.buildstats[measurement_name] = buildstats

     def rm_tmp(self):
         """Cleanup temporary/intermediate files and directories"""