author     Markus Lehtonen <markus.lehtonen@linux.intel.com>    2017-01-27 16:54:53 +0200
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2017-02-15 20:06:41 -0800
commit     fcd28fd50f6f3768c41f1db184f55cd439699bd7 (patch)
tree       9d182fdd08f21048dabe1711575d772952075f76
parent     8279d6f257440fdcc60d5668daa4081044370265 (diff)
download   poky-fcd28fd50f6f3768c41f1db184f55cd439699bd7.tar.gz
oeqa/buildperf: don't archive stdout/stderr of commands
Stop capturing the output of shell commands into <test>/commands.log.
Redirecting output into a file prevented the unittest framework from
capturing it, causing useless errors (with empty output) like:

    oeqa.utils.CommandError: Command '['bitbake', 'core-image-sato']'
    returned non-zero exit status 1 with output:

In general, the console output of commands is only interesting when
something fails. Dropping the commands.log file is also a huge saving
in disk space, and thus in repository size, when results are archived
in Git.
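
The failure mode is easy to reproduce with plain subprocess (a hedged
sketch, not the oeqa helpers; runCmd2 presumably pipes stdout by
default, which is what lets the exception carry the output):

    import subprocess

    cmd = ['sh', '-c', 'echo build error; exit 1']

    # Output redirected to a log file: nothing is captured, so the
    # exception has no output to show -- the "empty output" case above.
    try:
        with open('commands.log', 'a') as fobj:
            subprocess.run(cmd, stdout=fobj, check=True)
    except subprocess.CalledProcessError as err:
        print(repr(err.output))   # None

    # Output left on a pipe: the exception carries the captured text,
    # which can then appear in the test failure report.
    try:
        subprocess.run(cmd, stdout=subprocess.PIPE, check=True)
    except subprocess.CalledProcessError as err:
        print(repr(err.output))   # b'build error\n'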
(From OE-Core rev: e004664287ec03e7367a7bf553d9a3038444e82e)
Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r--  meta/lib/oeqa/buildperf/base.py       | 29
-rw-r--r--  meta/lib/oeqa/buildperf/test_basic.py | 10
2 files changed, 14 insertions(+), 25 deletions(-)
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index 975524c6de..dd473a0bdc 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -163,8 +163,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
                                        ('status', status),
                                        ('start_time', test.start_time),
                                        ('elapsed_time', test.elapsed_time),
-                                       ('cmd_log_file', os.path.relpath(test.cmd_log_file,
-                                                                        self.out_dir)),
                                        ('measurements', test.measurements)])
             if status in ('ERROR', 'FAILURE', 'EXPECTED_FAILURE'):
                 test_result['message'] = str(test.err[1])
@@ -268,18 +266,13 @@ class BuildPerfTestCase(unittest.TestCase):
     def out_dir(self):
         return os.path.join(self.base_dir, self.name)
 
-    @property
-    def cmd_log_file(self):
-        return os.path.join(self.out_dir, 'commands.log')
-
     def shortDescription(self):
         return super(BuildPerfTestCase, self).shortDescription() or ""
 
     def setUp(self):
         """Set-up fixture for each test"""
         if self.build_target:
-            self.log_cmd_output(['bitbake', self.build_target,
-                                 '-c', 'fetchall'])
+            self.run_cmd(['bitbake', self.build_target, '-c', 'fetchall'])
 
     def run(self, *args, **kwargs):
         """Run test"""
@@ -287,13 +280,12 @@ class BuildPerfTestCase(unittest.TestCase):
         super(BuildPerfTestCase, self).run(*args, **kwargs)
         self.elapsed_time = datetime.now() - self.start_time
 
-    def log_cmd_output(self, cmd):
-        """Run a command and log it's output"""
+    def run_cmd(self, cmd):
+        """Convenience method for running a command"""
         cmd_str = cmd if isinstance(cmd, str) else ' '.join(cmd)
         log.info("Logging command: %s", cmd_str)
         try:
-            with open(self.cmd_log_file, 'a') as fobj:
-                runCmd2(cmd, stdout=fobj)
+            runCmd2(cmd)
         except CommandError as err:
             log.error("Command failed: %s", err.retcode)
             raise
@@ -338,17 +330,14 @@ class BuildPerfTestCase(unittest.TestCase):
         log.info("Timing command: %s", cmd_str)
         data_q = SimpleQueue()
         try:
-            with open(self.cmd_log_file, 'a') as fobj:
-                proc = Process(target=_worker, args=(data_q, cmd,),
-                               kwargs={'stdout': fobj})
-                proc.start()
-                data = data_q.get()
-                proc.join()
+            proc = Process(target=_worker, args=(data_q, cmd,))
+            proc.start()
+            data = data_q.get()
+            proc.join()
             if isinstance(data, Exception):
                 raise data
         except CommandError:
-            log.error("Command '%s' failed, see %s for more details", cmd_str,
-                      self.cmd_log_file)
+            log.error("Command '%s' failed", cmd_str)
             raise
         etime = data['elapsed_time']
 
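The simplified helper is now a thin wrapper around runCmd2. A
standalone sketch of the same pattern, using plain subprocess since
runCmd2 and CommandError live in oeqa.utils.commands and are not part
of this diff:

    import logging
    import subprocess

    log = logging.getLogger('build-perf')

    def run_cmd(cmd):
        """Run a command, letting its output reach the console."""
        cmd_str = cmd if isinstance(cmd, str) else ' '.join(cmd)
        log.info("Running command: %s", cmd_str)
        try:
            # No stdout redirection: the unittest framework can capture
            # whatever the command prints and attach it to failures.
            subprocess.run(cmd, shell=isinstance(cmd, str), check=True)
        except subprocess.CalledProcessError as err:
            log.error("Command failed: %s", err.returncode)
            raise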
diff --git a/meta/lib/oeqa/buildperf/test_basic.py b/meta/lib/oeqa/buildperf/test_basic.py
index 47118306b7..1333407a65 100644
--- a/meta/lib/oeqa/buildperf/test_basic.py
+++ b/meta/lib/oeqa/buildperf/test_basic.py
@@ -38,8 +38,8 @@ class Test1P2(BuildPerfTestCase):
     def test12(self):
         """Build virtual/kernel"""
         # Build and cleans state in order to get all dependencies pre-built
-        self.log_cmd_output(['bitbake', self.build_target])
-        self.log_cmd_output(['bitbake', self.build_target, '-c', 'cleansstate'])
+        self.run_cmd(['bitbake', self.build_target])
+        self.run_cmd(['bitbake', self.build_target, '-c', 'cleansstate'])
 
         self.sync()
         self.measure_cmd_resources(['bitbake', self.build_target], 'build',
@@ -74,7 +74,7 @@ class Test2(BuildPerfTestCase):
     def test2(self):
         """Run core-image-sato do_rootfs with sstate"""
         # Build once in order to populate sstate cache
-        self.log_cmd_output(['bitbake', self.build_target])
+        self.run_cmd(['bitbake', self.build_target])
 
         self.rm_tmp()
         self.rm_cache()
@@ -106,8 +106,8 @@ class Test4(BuildPerfTestCase):
 
     def test4(self):
         """eSDK metrics"""
-        self.log_cmd_output("bitbake {} -c do_populate_sdk_ext".format(
-            self.build_target))
+        self.run_cmd(['bitbake', '-c', 'do_populate_sdk_ext',
+                      self.build_target])
         self.bb_vars = get_bb_vars(None, self.build_target)
         tmp_dir = self.bb_vars['TMPDIR']
         installer = os.path.join(
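Note that the last hunk also switches the eSDK invocation from a
formatted shell string to an argv list. Both spellings are accepted by
the helper, which only joins lists for logging; with the hypothetical
run_cmd() sketch above and an example target:

    build_target = 'core-image-sato'   # example value; set per test class

    # String form: runs through a shell, so quoting matters.
    run_cmd("bitbake {} -c do_populate_sdk_ext".format(build_target))

    # List form (as in the new code): no shell, no quoting pitfalls.
    run_cmd(['bitbake', '-c', 'do_populate_sdk_ext', build_target])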