summary refs log tree commit diff stats
path: root/scripts/oe-build-perf-test
diff options
context:
space:
mode:
Diffstat (limited to 'scripts/oe-build-perf-test')
-rwxr-xr-x scripts/oe-build-perf-test 19
1 file changed, 13 insertions, 6 deletions
diff --git a/scripts/oe-build-perf-test b/scripts/oe-build-perf-test
index 8142b0332b..786c715dfc 100755
--- a/scripts/oe-build-perf-test
+++ b/scripts/oe-build-perf-test
@@ -21,12 +21,15 @@ import logging
21import os 21import os
22import shutil 22import shutil
23import sys 23import sys
24import unittest
24from datetime import datetime 25from datetime import datetime
25 26
26sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib') 27sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib')
27import scriptpath 28import scriptpath
28scriptpath.add_oe_lib_path() 29scriptpath.add_oe_lib_path()
29from oeqa.buildperf import BuildPerfTestRunner, KernelDropCaches 30import oeqa.buildperf
31from oeqa.buildperf import (BuildPerfTestLoader, BuildPerfTestResult,
32 BuildPerfTestRunner, KernelDropCaches)
30from oeqa.utils.commands import runCmd 33from oeqa.utils.commands import runCmd
31 34
32 35
@@ -123,19 +126,23 @@ def main(argv=None):
123 # Check our capability to drop caches and ask pass if needed 126 # Check our capability to drop caches and ask pass if needed
124 KernelDropCaches.check() 127 KernelDropCaches.check()
125 128
129 # Load build perf tests
130 loader = BuildPerfTestLoader()
131 suite = loader.discover(start_dir=os.path.dirname(oeqa.buildperf.__file__))
126 # Set-up log file 132 # Set-up log file
127 out_dir = args.out_dir.format(date=datetime.now().strftime('%Y%m%d%H%M%S')) 133 out_dir = args.out_dir.format(date=datetime.now().strftime('%Y%m%d%H%M%S'))
128 setup_file_logging(os.path.join(out_dir, 'output.log')) 134 setup_file_logging(os.path.join(out_dir, 'output.log'))
129 135
130 # Run actual tests 136 # Run actual tests
131 runner = BuildPerfTestRunner(out_dir)
132 archive_build_conf(out_dir) 137 archive_build_conf(out_dir)
133 ret = runner.run_tests() 138 runner = BuildPerfTestRunner(out_dir, verbosity=2)
134 if not ret: 139 result = runner.run(suite)
140 if result.wasSuccessful():
135 if args.globalres_file: 141 if args.globalres_file:
136 runner.update_globalres_file(args.globalres_file) 142 result.update_globalres_file(args.globalres_file)
143 return 0
137 144
138 return ret 145 return 1
139 146
140 147
141if __name__ == '__main__': 148if __name__ == '__main__':