diff options
author | Tudor Florea <tudor.florea@enea.com> | 2014-10-16 03:05:19 +0200 |
---|---|---|
committer | Tudor Florea <tudor.florea@enea.com> | 2014-10-16 03:05:19 +0200 |
commit | c527fd1f14c27855a37f2e8ac5346ce8d940ced2 (patch) | |
tree | bb002c1fdf011c41dbd2f0927bed23ecb5f83c97 /bitbake/lib/bb/ui/buildinfohelper.py | |
download | poky-daisy-140929.tar.gz |
initial commit for Enea Linux 4.0-140929 (tag: daisy-140929)
Migrated from the internal git server on the daisy-enea-point-release branch
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'bitbake/lib/bb/ui/buildinfohelper.py')
-rw-r--r-- | bitbake/lib/bb/ui/buildinfohelper.py | 964 |
1 file changed, 964 insertions, 0 deletions
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py new file mode 100644 index 0000000000..69655709d3 --- /dev/null +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
@@ -0,0 +1,964 @@ | |||
1 | # | ||
2 | # BitBake ToasterUI Implementation | ||
3 | # | ||
4 | # Copyright (C) 2013 Intel Corporation | ||
5 | # | ||
6 | # This program is free software; you can redistribute it and/or modify | ||
7 | # it under the terms of the GNU General Public License version 2 as | ||
8 | # published by the Free Software Foundation. | ||
9 | # | ||
10 | # This program is distributed in the hope that it will be useful, | ||
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
13 | # GNU General Public License for more details. | ||
14 | # | ||
15 | # You should have received a copy of the GNU General Public License along | ||
16 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
17 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
18 | |||
import ast
import datetime
import os
import re
import sys

import bb
24 | |||
25 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toaster.toastermain.settings") | ||
26 | |||
27 | import toaster.toastermain.settings as toaster_django_settings | ||
28 | from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText | ||
29 | from toaster.orm.models import Target_Image_File | ||
30 | from toaster.orm.models import Variable, VariableHistory | ||
31 | from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File | ||
32 | from toaster.orm.models import Task_Dependency, Package_Dependency | ||
33 | from toaster.orm.models import Recipe_Dependency | ||
34 | from bb.msg import BBLogFormatter as format | ||
35 | |||
class NotExisting(Exception):
    """Raised when an ORM object was expected to already exist in the
    database but get_or_create() reported that it had to create it."""
    pass
38 | |||
class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        pass


    def create_build_object(self, build_info):
        """Create and return a new Build row from *build_info*.

        completed_on is provisionally set to started_on here; it is
        corrected later by update_build_object() when the build ends.
        """
        assert 'machine' in build_info
        assert 'distro' in build_info
        assert 'distro_version' in build_info
        assert 'started_on' in build_info
        assert 'cooker_log_path' in build_info
        assert 'build_name' in build_info
        assert 'bitbake_version' in build_info

        build = Build.objects.create(
                                    machine=build_info['machine'],
                                    distro=build_info['distro'],
                                    distro_version=build_info['distro_version'],
                                    started_on=build_info['started_on'],
                                    completed_on=build_info['started_on'],
                                    cooker_log_path=build_info['cooker_log_path'],
                                    build_name=build_info['build_name'],
                                    bitbake_version=build_info['bitbake_version'])

        return build

    def create_target_objects(self, target_info):
        """Create one Target row per requested target name.

        All targets start with is_image == False; image targets are
        identified later. Returns the list of created Target objects.
        """
        assert 'build' in target_info
        assert 'targets' in target_info

        targets = []
        for tgt_name in target_info['targets']:
            tgt_object = Target.objects.create( build = target_info['build'],
                                    target = tgt_name,
                                    is_image = False,
                                    );
            targets.append(tgt_object)
        return targets

    def update_build_object(self, build, errors, warnings, taskfailures):
        """Finalize a Build row: outcome, end time, duration, counters."""
        assert isinstance(build,Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        # any error or failed task marks the whole build as failed
        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = datetime.datetime.now()
        build.timespent = int((build.completed_on - build.started_on).total_seconds())
        build.errors_no = errors
        build.warnings_no = warnings
        build.outcome = outcome
        build.save()

    def update_target_object(self, target, license_manifest_path):
        """Record where the license manifest for *target* lives on disk."""

        target.license_manifest_path = license_manifest_path
        target.save()

    def get_update_task_object(self, task_information, must_exist = False):
        """Get or create the Task row described by *task_information*,
        copy matching fields into it, derive setscene/sstate outcome and
        elapsed time, save and return it.

        :param must_exist: when True, raise NotExisting (and delete the
            freshly created row) if the task was not already in the DB.
        """
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        task_object, created = Task.objects.get_or_create(
                                build=task_information['build'],
                                recipe=task_information['recipe'],
                                task_name=task_information['task_name'],
                                )

        if must_exist and created:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            task_object.delete()
            raise NotExisting("Task object created when expected to exist", task_information)

        # copy every key of task_information that matches an instance
        # attribute straight into the object's __dict__.
        # NOTE(review): this bypasses Django field descriptors/validation;
        # it works for plain model fields but is fragile — confirm intended.
        for v in vars(task_object):
            if v in task_information.keys():
                vars(task_object)[v] = task_information[v]

        # update setscene-related information if there is a matching
        # "<task>_setscene" companion task for this task
        if 1 == Task.objects.related_setscene(task_object).count():
            if task_object.outcome == Task.OUTCOME_COVERED:
                task_object.outcome = Task.OUTCOME_CACHED

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED

        # mark down duration if we have a start time and a current time
        if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
            duration = task_information['end_time'] - task_information['start_time']
            task_object.elapsed_time = duration

        task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        """Get or create the Recipe row for (layer_version, file_path),
        copy matching fields from *recipe_information*, save and return it.

        :param must_exist: when True, raise NotExisting (and delete the
            freshly created row) if the recipe was not already in the DB.
        """
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information


        recipe_object, created = Recipe.objects.get_or_create(
                                 layer_version=recipe_information['layer_version'],
                                 file_path=recipe_information['file_path'])

        if must_exist and created:
            recipe_object.delete()
            raise NotExisting("Recipe object created when expected to exist", recipe_information)

        # same __dict__-copy shortcut as in get_update_task_object — see
        # the NOTE(review) there
        for v in vars(recipe_object):
            if v in recipe_information.keys():
                vars(recipe_object)[v] = recipe_information[v]

        recipe_object.save()

        return recipe_object

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        """Get or create the Layer_Version row tying *layer_obj* to
        *build_obj* at a specific branch/commit/priority; return it."""
        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information

        layer_version_object, created = Layer_Version.objects.get_or_create(
                                    build = build_obj,
                                    layer = layer_obj,
                                    branch = layer_version_information['branch'],
                                    commit = layer_version_information['commit'],
                                    priority = layer_version_information['priority']
                                    )

        return layer_version_object

    def get_update_layer_object(self, layer_information):
        """Get or create the Layer row keyed on name/local_path/index URL."""
        assert 'name' in layer_information
        assert 'local_path' in layer_information
        assert 'layer_index_url' in layer_information

        layer_object, created = Layer.objects.get_or_create(
                                name=layer_information['name'],
                                local_path=layer_information['local_path'],
                                layer_index_url=layer_information['layer_index_url'])

        return layer_object

    def save_target_file_information(self, build_obj, target_obj, filedata):
        """Store the target image's filesystem listing as Target_File rows.

        *filedata* carries three lists ('dirs', 'files', 'syms') of
        ls-style tuples: (permission string, user, group, size, path, ...,
        [symlink target]). Directories are inserted shallowest-first so a
        child can always look up its parent row.
        """
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # we insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            if len(path) == 0:
                # we create the root directory as a special case
                path = "/"
                tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        )
                # the root directory is its own parent
                tf_obj.directory = tf_obj
                tf_obj.save()
                continue
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            # first character of the ls mode string encodes the inode type
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                # resolve ".." components by popping the previous element
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Exception as e:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)


    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes):
        """Store installed-package rows and their dependencies for a target.

        Packages that were not built in this build (get_or_create created
        them) are filled in from the runtime-reverse metadata in
        *pkgpnmap*. Raises Exception with accumulated messages if any
        metadata keys were missing.
        """
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            # OPKGN, when present, is the original package name to search by
            searchname = p
            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
            if created:
                # package was not build in the current build, but
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                # no files recorded for this package, so save files info
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        Package_File.objects.create( package = packagedict[p]['object'],
                            path = targetpath,
                            size = targetfilesize)
                except KeyError as e:
                    # collect errors and keep going; raised in bulk at the end
                    errormsg += "  stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                # NOTE(review): if deptype is neither 'depends' nor
                # 'recommends', tdeptype keeps the value from a previous
                # iteration (or raises NameError on the very first one) —
                # TODO confirm whether other dep types can occur here
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                Package_Dependency.objects.create( package = packagedict[p]['object'],
                                        depends_on = packagedict[px]['object'],
                                        dep_type = tdeptype,
                                        target = target_obj);

        if (len(errormsg) > 0):
            raise Exception(errormsg)

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        """Record one produced image file (name + size) for *target_obj*."""
        # NOTE(review): objects.create() already saves the row; the extra
        # save() below issues a second, redundant UPDATE
        target_image_file = Target_Image_File.objects.create( target = target_obj,
                            file_name = file_name,
                            file_size = file_size)
        target_image_file.save()

    def create_logmessage(self, log_information):
        """Create a LogMessage row from *log_information*.

        NOTE(review): returns log_object.save(), which is None in Django —
        callers must not rely on the return value.
        """
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        # copy any extra matching keys (e.g. pathname, lineno) into the
        # instance __dict__ — same shortcut as get_update_task_object
        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes):
        """Store a built package, its files and its runtime relationships.

        *package_info* is the PKG* variable dump for one package;
        *recipes* maps PN -> Recipe object. Returns the Package object.
        """
        assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        # OPKGN, when present, is the original (pre-rename) package name
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        bp_object, created = Package.objects.get_or_create( build = build_obj,
                                       name = pname )

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipes[package_info['PN']]
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        for path in package_info['FILES_INFO']:
            fo = Package_File.objects.create( package = bp_object,
                    path = path,
                    size = package_info['FILES_INFO'][path] )

        def _po_byname(p):
            # resolve a package name to a Package row, creating a stub
            # (size == -1) when the dependency target was not built here
            pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
        """Store the build's variable dump as Variable/VariableHistory rows,
        with HelpText rows for documented variables.

        *vardump* maps variable name -> {'v': value, 'doc': docstring,
        'func': bool, 'history': [{'file','line','op'}, ...]}.
        """
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc'];
            if desc is None:
                # fall back to the documentation of the "root" variable,
                # e.g. FOO_bar inherits the doc of FOO
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if desc:
                helptext_obj = HelpText.objects.create(build=build_obj,
                    area=HelpText.VARIABLE,
                    key=k,
                    text=desc)
            # functions (shell/python tasks) are not stored as variables
            if not bool(vardump[k]['func']):
                value = vardump[k]['v'];
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)
                for vh in vardump[k]['history']:
                    # history entries that merely come from documentation.conf
                    # are noise; skip them
                    if not 'documentation.conf' in vh['file']:
                        VariableHistory.objects.create( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op'])
475 | |||
476 | class BuildInfoHelper(object): | ||
477 | """ This class gathers the build information from the server and sends it | ||
478 | towards the ORM wrapper for storing in the database | ||
479 | It is instantiated once per build | ||
480 | Keeps in memory all data that needs matching before writing it to the database | ||
481 | """ | ||
482 | |||
    def __init__(self, server, has_build_history = False):
        """Set up per-build state.

        :param server: bitbake server proxy; used via runCommand() to
            read variables (TMPDIR is cached immediately).
        :param has_build_history: whether buildhistory data is available.
        """
        self._configure_django()
        # internal_state accumulates cross-event data (build, targets,
        # per-task bookkeeping) until it can be written to the database
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        # monotonically increasing execution order for non-setscene tasks
        self.task_order = 0
        self.server = server
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
492 | |||
493 | def _configure_django(self): | ||
494 | # Add toaster to sys path for importing modules | ||
495 | sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster')) | ||
496 | |||
497 | ################### | ||
498 | ## methods to convert event/external info into objects that the ORM layer uses | ||
499 | |||
500 | |||
501 | def _get_build_information(self): | ||
502 | build_info = {} | ||
503 | # Generate an identifier for each new build | ||
504 | |||
505 | build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0] | ||
506 | build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0] | ||
507 | build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0] | ||
508 | build_info['started_on'] = datetime.datetime.now() | ||
509 | build_info['completed_on'] = datetime.datetime.now() | ||
510 | build_info['cooker_log_path'] = self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0] | ||
511 | build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0] | ||
512 | build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0] | ||
513 | |||
514 | return build_info | ||
515 | |||
516 | def _get_task_information(self, event, recipe): | ||
517 | assert 'taskname' in vars(event) | ||
518 | |||
519 | task_information = {} | ||
520 | task_information['build'] = self.internal_state['build'] | ||
521 | task_information['outcome'] = Task.OUTCOME_NA | ||
522 | task_information['recipe'] = recipe | ||
523 | task_information['task_name'] = event.taskname | ||
524 | try: | ||
525 | # some tasks don't come with a hash. and that's ok | ||
526 | task_information['sstate_checksum'] = event.taskhash | ||
527 | except AttributeError: | ||
528 | pass | ||
529 | return task_information | ||
530 | |||
531 | def _get_layer_version_for_path(self, path): | ||
532 | assert path.startswith("/") | ||
533 | assert 'build' in self.internal_state | ||
534 | |||
535 | def _slkey(layer_version): | ||
536 | assert isinstance(layer_version, Layer_Version) | ||
537 | return len(layer_version.layer.local_path) | ||
538 | |||
539 | # Heuristics: we always match recipe to the deepest layer path that | ||
540 | # we can match to the recipe file path | ||
541 | for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey): | ||
542 | if (path.startswith(bl.layer.local_path)): | ||
543 | return bl | ||
544 | |||
545 | #TODO: if we get here, we didn't read layers correctly | ||
546 | assert False | ||
547 | return None | ||
548 | |||
549 | def _get_recipe_information_from_taskfile(self, taskfile): | ||
550 | localfilepath = taskfile.split(":")[-1] | ||
551 | layer_version_obj = self._get_layer_version_for_path(localfilepath) | ||
552 | |||
553 | recipe_info = {} | ||
554 | recipe_info['layer_version'] = layer_version_obj | ||
555 | recipe_info['file_path'] = taskfile | ||
556 | |||
557 | return recipe_info | ||
558 | |||
559 | def _get_path_information(self, task_object): | ||
560 | assert isinstance(task_object, Task) | ||
561 | build_stats_format = "{tmpdir}/buildstats/{target}-{machine}/{buildname}/{package}/" | ||
562 | build_stats_path = [] | ||
563 | |||
564 | for t in self.internal_state['targets']: | ||
565 | target = t.target | ||
566 | machine = self.internal_state['build'].machine | ||
567 | buildname = self.internal_state['build'].build_name | ||
568 | pe, pv = task_object.recipe.version.split(":",1) | ||
569 | if len(pe) > 0: | ||
570 | package = task_object.recipe.name + "-" + pe + "_" + pv | ||
571 | else: | ||
572 | package = task_object.recipe.name + "-" + pv | ||
573 | |||
574 | build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, target=target, | ||
575 | machine=machine, buildname=buildname, | ||
576 | package=package)) | ||
577 | |||
578 | return build_stats_path | ||
579 | |||
580 | def _remove_redundant(self, string): | ||
581 | ret = [] | ||
582 | for i in string.split(): | ||
583 | if i not in ret: | ||
584 | ret.append(i) | ||
585 | return " ".join(sorted(ret)) | ||
586 | |||
587 | |||
588 | ################################ | ||
589 | ## external available methods to store information | ||
590 | |||
591 | def store_layer_info(self, event): | ||
592 | assert 'data' in vars(event) | ||
593 | layerinfos = event.data | ||
594 | self.internal_state['lvs'] = {} | ||
595 | for layer in layerinfos: | ||
596 | self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version'] | ||
597 | |||
598 | |||
    def store_started_build(self, event):
        """Handle the build-started event: create the Build row, attach
        layer versions collected earlier, create Target rows and dump the
        build's variable configuration.
        """
        assert '_pkgs' in vars(event)
        build_information = self._get_build_information()

        build_obj = self.orm_wrapper.create_build_object(build_information)
        self.internal_state['build'] = build_obj

        # save layer version information for this build
        # (collected earlier by store_layer_info, parked in 'lvs')
        for layer_obj in self.internal_state['lvs']:
            self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

        del self.internal_state['lvs']

        # create target information
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = build_obj

        self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)

        # Save build configuration
        self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])
621 | |||
622 | def update_target_image_file(self, event): | ||
623 | image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0] | ||
624 | for t in self.internal_state['targets']: | ||
625 | if t.is_image == True: | ||
626 | output_files = list(event.data.viewkeys()) | ||
627 | for output in output_files: | ||
628 | if t.target in output and output.split('.rootfs.')[1] in image_fstypes: | ||
629 | self.orm_wrapper.save_target_image_file_information(t, output, event.data[output]) | ||
630 | |||
631 | def update_build_information(self, event, errors, warnings, taskfailures): | ||
632 | if 'build' in self.internal_state: | ||
633 | self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures) | ||
634 | |||
635 | |||
636 | def store_license_manifest_path(self, event): | ||
637 | deploy_dir = event.data['deploy_dir'] | ||
638 | image_name = event.data['image_name'] | ||
639 | path = deploy_dir + "/licenses/" + image_name + "/" | ||
640 | for target in self.internal_state['targets']: | ||
641 | if target.target in image_name: | ||
642 | self.orm_wrapper.update_target_object(target, path) | ||
643 | |||
644 | |||
645 | def store_started_task(self, event): | ||
646 | assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped)) | ||
647 | assert 'taskfile' in vars(event) | ||
648 | localfilepath = event.taskfile.split(":")[-1] | ||
649 | assert localfilepath.startswith("/") | ||
650 | |||
651 | identifier = event.taskfile + ":" + event.taskname | ||
652 | |||
653 | recipe_information = self._get_recipe_information_from_taskfile(event.taskfile) | ||
654 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True) | ||
655 | |||
656 | task_information = self._get_task_information(event, recipe) | ||
657 | task_information['outcome'] = Task.OUTCOME_NA | ||
658 | |||
659 | if isinstance(event, bb.runqueue.runQueueTaskSkipped): | ||
660 | assert 'reason' in vars(event) | ||
661 | task_information['task_executed'] = False | ||
662 | if event.reason == "covered": | ||
663 | task_information['outcome'] = Task.OUTCOME_COVERED | ||
664 | if event.reason == "existing": | ||
665 | task_information['outcome'] = Task.OUTCOME_PREBUILT | ||
666 | else: | ||
667 | task_information['task_executed'] = True | ||
668 | if 'noexec' in vars(event) and event.noexec == True: | ||
669 | task_information['task_executed'] = False | ||
670 | task_information['outcome'] = Task.OUTCOME_EMPTY | ||
671 | task_information['script_type'] = Task.CODING_NA | ||
672 | |||
673 | # do not assign order numbers to scene tasks | ||
674 | if not isinstance(event, bb.runqueue.sceneQueueTaskStarted): | ||
675 | self.task_order += 1 | ||
676 | task_information['order'] = self.task_order | ||
677 | |||
678 | task_obj = self.orm_wrapper.get_update_task_object(task_information) | ||
679 | |||
680 | self.internal_state['taskdata'][identifier] = { | ||
681 | 'outcome': task_information['outcome'], | ||
682 | } | ||
683 | |||
684 | |||
685 | def store_tasks_stats(self, event): | ||
686 | for (taskfile, taskname, taskstats, recipename) in event.data: | ||
687 | localfilepath = taskfile.split(":")[-1] | ||
688 | assert localfilepath.startswith("/") | ||
689 | |||
690 | recipe_information = self._get_recipe_information_from_taskfile(taskfile) | ||
691 | recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'], | ||
692 | file_path__endswith = recipe_information['file_path'], | ||
693 | name = recipename) | ||
694 | |||
695 | task_information = {} | ||
696 | task_information['build'] = self.internal_state['build'] | ||
697 | task_information['recipe'] = recipe_object | ||
698 | task_information['task_name'] = taskname | ||
699 | task_information['cpu_usage'] = taskstats['cpu_usage'] | ||
700 | task_information['disk_io'] = taskstats['disk_io'] | ||
701 | task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist | ||
702 | |||
    def update_and_store_task(self, event):
        """Update (or create) the Task record matching a runtime task event.

        Handles bb.build.Task* and bb.runqueue.*Task* events: resolves the
        event to a tracked identifier in internal_state['taskdata'],
        accumulates start/end time across paired Started/Completed events,
        and persists the result via the ORM wrapper.
        """
        assert 'taskfile' in vars(event)
        # taskfile may carry a "virtual:..." prefix; the filesystem path is
        # the last colon-separated component
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                # (bb.build events may use an unprefixed taskfile; match it
                # against the suffix of the identifiers we already track,
                # but only when the match is unambiguous)
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        # strip the trailing ":taskname" to recover the task file component
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event,recipe)

        if 'time' in vars(event):
            # first timed event for this task is its start; the second one
            # (paired Completed/Failed) supplies the end time
            if not 'start_time' in self.internal_state['taskdata'][identifier]:
                self.internal_state['taskdata'][identifier]['start_time'] = event.time
            else:
                task_information['end_time'] = event.time
                task_information['start_time'] = self.internal_state['taskdata'][identifier]['start_time']

        # carry over the outcome recorded when the task was first seen
        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        # only finalize the outcome (and drop the tracking entry) if nothing
        # earlier (covered/existing/noexec) already decided it
        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

            if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
                task_information['outcome'] = Task.OUTCOME_FAILED
                del self.internal_state['taskdata'][identifier]

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist
756 | |||
757 | def store_missed_state_tasks(self, event): | ||
758 | for (fn, taskname, taskhash, sstatefile) in event.data['missed']: | ||
759 | |||
760 | identifier = fn + taskname + "_setscene" | ||
761 | recipe_information = self._get_recipe_information_from_taskfile(fn) | ||
762 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information) | ||
763 | class MockEvent: pass | ||
764 | mevent = MockEvent() | ||
765 | mevent.taskname = taskname | ||
766 | mevent.taskhash = taskhash | ||
767 | task_information = self._get_task_information(mevent,recipe) | ||
768 | |||
769 | task_information['start_time'] = datetime.datetime.now() | ||
770 | task_information['outcome'] = Task.OUTCOME_NA | ||
771 | task_information['sstate_checksum'] = taskhash | ||
772 | task_information['sstate_result'] = Task.SSTATE_MISS | ||
773 | task_information['path_to_sstate_obj'] = sstatefile | ||
774 | |||
775 | self.orm_wrapper.get_update_task_object(task_information) | ||
776 | |||
777 | for (fn, taskname, taskhash, sstatefile) in event.data['found']: | ||
778 | |||
779 | identifier = fn + taskname + "_setscene" | ||
780 | recipe_information = self._get_recipe_information_from_taskfile(fn) | ||
781 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information) | ||
782 | class MockEvent: pass | ||
783 | mevent = MockEvent() | ||
784 | mevent.taskname = taskname | ||
785 | mevent.taskhash = taskhash | ||
786 | task_information = self._get_task_information(mevent,recipe) | ||
787 | |||
788 | task_information['path_to_sstate_obj'] = sstatefile | ||
789 | |||
790 | self.orm_wrapper.get_update_task_object(task_information) | ||
791 | |||
792 | |||
793 | def store_target_package_data(self, event): | ||
794 | assert 'data' in vars(event) | ||
795 | # for all image targets | ||
796 | for target in self.internal_state['targets']: | ||
797 | if target.is_image: | ||
798 | try: | ||
799 | pkgdata = event.data['pkgdata'] | ||
800 | imgdata = event.data['imgdata'][target.target] | ||
801 | self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes']) | ||
802 | filedata = event.data['filedata'][target.target] | ||
803 | self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata) | ||
804 | except KeyError: | ||
805 | # we must have not got the data for this image, nothing to save | ||
806 | pass | ||
807 | |||
808 | |||
809 | |||
    def store_dependency_information(self, event):
        """Store the dependency graph delivered by a DepTreeGenerated event.

        Persists, in order: layer priorities, one Recipe per pn (marking
        image recipes and their targets), recipe-level build-time
        dependencies, and the task dependency graph. Raises Exception with
        the accumulated messages if any recipe dependency failed to resolve.
        """
        assert '_depgraph' in vars(event)
        assert 'layer-priorities' in event._depgraph
        assert 'pn' in event._depgraph
        assert 'tdepends' in event._depgraph

        errormsg = ""

        # save layer version priorities
        if 'layer-priorities' in event._depgraph.keys():
            for lv in event._depgraph['layer-priorities']:
                (name, path, regexp, priority) = lv
                layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
                assert layer_version_obj is not None
                layer_version_obj.priority = priority
                layer_version_obj.save()

        # save recipe information
        self.internal_state['recipes'] = {}
        for pn in event._depgraph['pn']:

            # filename may carry a "virtual:..." prefix; the real path is
            # the last colon-separated component
            file_name = event._depgraph['pn'][pn]['filename']
            layer_version_obj = self._get_layer_version_for_path(file_name.split(":")[-1])

            assert layer_version_obj is not None

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
            recipe_info['layer_version'] = layer_version_obj
            recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
            recipe_info['license'] = event._depgraph['pn'][pn]['license']
            recipe_info['description'] = event._depgraph['pn'][pn]['description']
            recipe_info['section'] = event._depgraph['pn'][pn]['section']
            recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
            recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
            recipe_info['file_path'] = file_name
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            # a recipe inheriting image.bbclass builds an image; propagate
            # that flag to any matching build target
            recipe.is_image = False
            if 'inherits' in event._depgraph['pn'][pn].keys():
                for cls in event._depgraph['pn'][pn]['inherits']:
                    if cls.endswith('/image.bbclass'):
                        recipe.is_image = True
                        break
            if recipe.is_image:
                for t in self.internal_state['targets']:
                    if pn == t.target:
                        t.is_image = True
                        t.save()
            self.internal_state['recipes'][pn] = recipe

        # we'll not get recipes for key w/ values listed in ASSUME_PROVIDED

        assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

        # save recipe dependency
        # buildtime
        for recipe in event._depgraph['depends']:
            try:
                target = self.internal_state['recipes'][recipe]
                for dep in event._depgraph['depends'][recipe]:
                    dependency = self.internal_state['recipes'][dep]
                    Recipe_Dependency.objects.get_or_create( recipe = target,
                            depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
            except KeyError as e:
                # NOTE(review): 'e' is a KeyError instance, not a string, so
                # 'e not in assume_provided' is always True, and str(e) is
                # quoted (e.g. "'virtual/x'") so the startswith check never
                # matches either — verify whether this filter is intended
                if e not in assume_provided and not str(e).startswith("virtual/"):
                    errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)

        # save all task information
        def _save_a_task(taskdesc):
            # taskdesc is "pn.taskname"; pn itself may contain dots, so the
            # task name is only the final component
            spec = re.split(r'\.', taskdesc);
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            # NOTE(review): this mutates the shared event object and sets
            # taskname to pn (not the task name); task_name is overwritten
            # below — confirm this is intentional
            e = event
            e.taskname = pn
            recipe = self.internal_state['recipes'][pn]
            task_info = self._get_task_information(e, recipe)
            task_info['task_name'] = taskname
            task_obj = self.orm_wrapper.get_update_task_object(task_info)
            return task_obj

        # create tasks
        tasks = {}
        for taskdesc in event._depgraph['tdepends']:
            tasks[taskdesc] = _save_a_task(taskdesc)

        # create dependencies between tasks
        for taskdesc in event._depgraph['tdepends']:
            target = tasks[taskdesc]
            for taskdep in event._depgraph['tdepends'][taskdesc]:
                if taskdep not in tasks:
                    # Fetch tasks info is not collected previously
                    dep = _save_a_task(taskdep)
                else:
                    dep = tasks[taskdep]
                Task_Dependency.objects.get_or_create( task = target, depends_on = dep )

        if (len(errormsg) > 0):
            raise Exception(errormsg)
910 | |||
911 | def store_build_package_information(self, event): | ||
912 | assert 'data' in vars(event) | ||
913 | package_info = event.data | ||
914 | self.orm_wrapper.save_build_package_information(self.internal_state['build'], | ||
915 | package_info, | ||
916 | self.internal_state['recipes'], | ||
917 | ) | ||
918 | |||
919 | def _store_log_information(self, level, text): | ||
920 | log_information = {} | ||
921 | log_information['build'] = self.internal_state['build'] | ||
922 | log_information['level'] = level | ||
923 | log_information['message'] = text | ||
924 | self.orm_wrapper.create_logmessage(log_information) | ||
925 | |||
926 | def store_log_info(self, text): | ||
927 | self._store_log_information(LogMessage.INFO, text) | ||
928 | |||
929 | def store_log_warn(self, text): | ||
930 | self._store_log_information(LogMessage.WARNING, text) | ||
931 | |||
932 | def store_log_error(self, text): | ||
933 | self._store_log_information(LogMessage.ERROR, text) | ||
934 | |||
    def store_log_event(self, event):
        """Store a logging event against the current build.

        Events that arrive before the build record exists are parked in
        internal_state['backlog'] and replayed (recursively) once the
        build is available. Only warnings and errors are persisted.
        """
        # if the build now exists and a backlog was accumulated, drain it
        # first (pop() replays most-recent-first; the recursion empties the
        # list and then deletes the 'backlog' key)
        if 'build' in self.internal_state and 'backlog' in self.internal_state:
            if len(self.internal_state['backlog']):
                tempevent = self.internal_state['backlog'].pop()
                print "Saving stored event ", tempevent
                self.store_log_event(tempevent)
            else:
                del self.internal_state['backlog']

        # 'format' presumably refers to a log-formatter class imported
        # outside this view (likely bb.msg.BBLogFormatter) — TODO confirm
        if event.levelno < format.WARNING:
            return

        # no build record yet: park the event for later replay
        if not 'build' in self.internal_state:
            print "Save event for later"
            if not 'backlog' in self.internal_state:
                self.internal_state['backlog'] = []
            self.internal_state['backlog'].append(event)

            return
        log_information = {}
        log_information['build'] = self.internal_state['build']
        # NOTE(review): a levelno strictly between WARNING and ERROR would
        # leave 'level' unset — verify no such level can occur here
        if event.levelno >= format.ERROR:
            log_information['level'] = LogMessage.ERROR
        elif event.levelno == format.WARNING:
            log_information['level'] = LogMessage.WARNING
        log_information['message'] = event.msg
        log_information['pathname'] = event.pathname
        log_information['lineno'] = event.lineno
        self.orm_wrapper.create_logmessage(log_information)
964 | |||