author    Markus Lehtonen <markus.lehtonen@linux.intel.com>   2016-12-28 16:01:21 +0200
committer Richard Purdie <richard.purdie@linuxfoundation.org> 2017-01-23 12:05:22 +0000
commit    0981bcb098bc4e5a63d3184fac3a7ab701866c1a (patch)
tree      657a46607d5b9d6f31670ca4e7d31134f09cf861 /scripts
parent    0e2d84728bc625b5055bbc0512ae2cd06cd45dcf (diff)
oe-build-perf-test: save test metadata in a separate file
The patch introduces a new metadata file (.json or .xml) in the output
directory. All test metadata, e.g. git revision and tester host
information, is now stored there. The JSON report format changes
slightly, as the metadata is no longer present in results.json.

[YOCTO #10590]

(From OE-Core rev: 2036c646019660e32f1bc277fdec0cdbff0afdd4)

Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
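For illustration: the exact schema is produced by oeqa.utils.metadata.metadata_from_bb()
and will contain more fields, but the keys the script itself reads back imply a
metadata.json of roughly this shape (the hostname and revision values below are
hypothetical, not taken from the patch):

    {
      "hostname": "buildhost1",
      "layers": {
        "meta": {
          "branch": "master",
          "commit": "0e2d84728bc625b5055bbc0512ae2cd06cd45dcf",
          "commit_count": "12345"
        }
      }
    }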
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/oe-build-perf-test | 118
1 file changed, 114 insertions(+), 4 deletions(-)
diff --git a/scripts/oe-build-perf-test b/scripts/oe-build-perf-test
index 4ec9f1403e..fc4ab3135d 100755
--- a/scripts/oe-build-perf-test
+++ b/scripts/oe-build-perf-test
@@ -17,8 +17,10 @@
 import argparse
 import errno
 import fcntl
+import json
 import logging
 import os
+import re
 import shutil
 import sys
 import unittest
@@ -27,11 +29,13 @@ from datetime import datetime
 sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib')
 import scriptpath
 scriptpath.add_oe_lib_path()
+scriptpath.add_bitbake_lib_path()
 import oeqa.buildperf
 from oeqa.buildperf import (BuildPerfTestLoader, BuildPerfTestResult,
                             BuildPerfTestRunner, KernelDropCaches)
 from oeqa.utils.commands import runCmd
 from oeqa.utils.git import GitRepo, GitError
+from oeqa.utils.metadata import metadata_from_bb, write_metadata_file
 
 
 # Set-up logging
@@ -115,6 +119,100 @@ def archive_build_conf(out_dir):
     shutil.copytree(src_dir, tgt_dir)
 
 
+def git_commit_results(repo_dir, results_dir, branch, tag, metadata):
+    """Commit results into a Git repository"""
+    repo = GitRepo(repo_dir, is_topdir=True)
+    distro_branch = metadata['layers']['meta']['branch']
+    distro_commit = metadata['layers']['meta']['commit']
+    distro_commit_count = metadata['layers']['meta']['commit_count']
+
+    # Replace keywords
+    branch = branch.format(git_branch=distro_branch,
+                           tester_host=metadata['hostname'])
+
+    log.info("Committing test results into %s %s", repo_dir, branch)
+    tmp_index = os.path.join(repo_dir, '.git', 'index.oe-build-perf')
+    try:
+        # Create new commit object from the new results
+        env_update = {'GIT_INDEX_FILE': tmp_index,
+                      'GIT_WORK_TREE': results_dir}
+        repo.run_cmd('add .', env_update)
+        tree = repo.run_cmd('write-tree', env_update)
+        parent = repo.rev_parse(branch)
+        msg = "Results of {}:{}\n".format(distro_branch, distro_commit)
+        git_cmd = ['commit-tree', tree, '-m', msg]
+        if parent:
+            git_cmd += ['-p', parent]
+        commit = repo.run_cmd(git_cmd, env_update)
+
+        # Update branch head
+        git_cmd = ['update-ref', 'refs/heads/' + branch, commit]
+        if parent:
+            git_cmd.append(parent)
+        repo.run_cmd(git_cmd)
+
+        # Update current HEAD, if we're on branch 'branch'
+        if repo.get_current_branch() == branch:
+            log.info("Updating %s HEAD to latest commit", repo_dir)
+            repo.run_cmd('reset --hard')
+
+        # Create (annotated) tag
+        if tag:
+            # Find tags matching the pattern
+            tag_keywords = dict(git_branch=distro_branch,
+                                git_commit=distro_commit,
+                                git_commit_count=distro_commit_count,
+                                tester_host=metadata['hostname'],
+                                tag_num='[0-9]{1,5}')
+            tag_re = re.compile(tag.format(**tag_keywords) + '$')
+            tag_keywords['tag_num'] = 0
+            for existing_tag in repo.run_cmd('tag').splitlines():
+                if tag_re.match(existing_tag):
+                    tag_keywords['tag_num'] += 1
+
+            tag = tag.format(**tag_keywords)
+            msg = "Test run #{} of {}:{}\n".format(tag_keywords['tag_num'],
+                                                   distro_branch,
+                                                   distro_commit)
+            repo.run_cmd(['tag', '-a', '-m', msg, tag, commit])
+
+    finally:
+        if os.path.exists(tmp_index):
+            os.unlink(tmp_index)
+
+
+def update_globalres_file(result_obj, filename, metadata):
+    """Write results to globalres csv file"""
+    # Map test names to time and size columns in globalres
+    # The tuples represent index and length of times and sizes
+    # respectively
+    gr_map = {'test1': ((0, 1), (8, 1)),
+              'test12': ((1, 1), (None, None)),
+              'test13': ((2, 1), (9, 1)),
+              'test2': ((3, 1), (None, None)),
+              'test3': ((4, 3), (None, None)),
+              'test4': ((7, 1), (10, 2))}
+
+    values = ['0'] * 12
+    for status, test, _ in result_obj.all_results():
+        if status in ['ERROR', 'SKIPPED']:
+            continue
+        (t_ind, t_len), (s_ind, s_len) = gr_map[test.name]
+        if t_ind is not None:
+            values[t_ind:t_ind + t_len] = test.times
+        if s_ind is not None:
+            values[s_ind:s_ind + s_len] = test.sizes
+
+    log.debug("Writing globalres log to %s", filename)
+    rev_info = metadata['layers']['meta']
+    with open(filename, 'a') as fobj:
+        fobj.write('{},{}:{},{},'.format(metadata['hostname'],
+                                         rev_info['branch'],
+                                         rev_info['commit'],
+                                         rev_info['commit_count']))
+        fobj.write(','.join(values) + '\n')
+
+
 def parse_args(argv):
     """Parse command line arguments"""
     parser = argparse.ArgumentParser(
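For reference, update_globalres_file() above appends one CSV row per test run,
of the shape (all concrete values below are hypothetical):

    <hostname>,<branch>:<commit>,<commit_count>,<12 time/size columns>

e.g.

    buildhost1,master:0e2d84728bc625b5055bbc0512ae2cd06cd45dcf,12345,125.0,47.3,...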
@@ -183,7 +281,19 @@ def main(argv=None):
     else:
         suite = loader.loadTestsFromModule(oeqa.buildperf)
 
+    # Save test metadata
+    metadata = metadata_from_bb()
+    log.info("Testing Git revision branch:commit %s:%s (%s)",
+             metadata['layers']['meta']['branch'],
+             metadata['layers']['meta']['commit'],
+             metadata['layers']['meta']['commit_count'])
+    if args.xml:
+        write_metadata_file(os.path.join(out_dir, 'metadata.xml'), metadata)
+    else:
+        with open(os.path.join(out_dir, 'metadata.json'), 'w') as fobj:
+            json.dump(metadata, fobj, indent=2)
     archive_build_conf(out_dir)
+
     runner = BuildPerfTestRunner(out_dir, verbosity=2)
 
     # Suppress logger output to stderr so that the output from unittest
@@ -201,11 +311,11 @@
     else:
         result.write_results_json()
     if args.globalres_file:
-        result.update_globalres_file(args.globalres_file)
+        update_globalres_file(result, args.globalres_file, metadata)
     if args.commit_results:
-        result.git_commit_results(args.commit_results,
-                                  args.commit_results_branch,
-                                  args.commit_results_tag)
+        git_commit_results(args.commit_results, out_dir,
+                           args.commit_results_branch, args.commit_results_tag,
+                           metadata)
     if result.wasSuccessful():
         return 0
 
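The notable technique in git_commit_results() is committing a results directory
into a repository without disturbing that repository's checkout or real index:
a throwaway GIT_INDEX_FILE plus GIT_WORK_TREE feed the plumbing commands
write-tree, commit-tree and update-ref. Below is a minimal standalone sketch of
the same sequence using plain subprocess instead of the oeqa GitRepo wrapper;
the function name and its arguments are placeholders, not part of the patch.

import os
import subprocess

def commit_dir_to_branch(repo_dir, results_dir, branch, message):
    """Sketch: commit the contents of results_dir onto 'branch' of repo_dir."""
    git_dir = os.path.join(repo_dir, '.git')
    tmp_index = os.path.join(git_dir, 'index.tmp')  # throwaway index file
    env = dict(os.environ, GIT_INDEX_FILE=tmp_index, GIT_WORK_TREE=results_dir)

    def git(args, **kwargs):
        return subprocess.check_output(['git', '--git-dir', git_dir] + args,
                                       universal_newlines=True, **kwargs).strip()

    try:
        # Stage the directory into the temporary index and write it out as a
        # tree object; the repo's real index and work tree stay untouched.
        git(['add', '.'], env=env, cwd=results_dir)
        tree = git(['write-tree'], env=env)

        # Wrap the tree in a commit, chaining to the old branch head if any.
        try:
            parent = git(['rev-parse', '--verify', 'refs/heads/' + branch],
                         stderr=subprocess.DEVNULL)
        except subprocess.CalledProcessError:
            parent = None  # branch does not exist yet
        cmd = ['commit-tree', tree, '-m', message]
        if parent:
            cmd += ['-p', parent]
        commit = git(cmd)

        # Advance the branch ref to the new commit.
        git(['update-ref', 'refs/heads/' + branch, commit])
        return commit
    finally:
        if os.path.exists(tmp_index):
            os.unlink(tmp_index)

Because only refs/heads/<branch> moves, results can be committed regardless of
what is currently checked out; the 'reset --hard' in the patch is needed only
when the results repository happens to have that very branch checked out.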