author      Alexandru DAMIAN <alexandru.damian@intel.com>          2014-11-04 16:47:36 +0000
committer   Richard Purdie <richard.purdie@linuxfoundation.org>    2014-11-12 17:04:48 +0000
commit      3e9fc8d0916f1d51dd6b748fff966d8aafd7f438 (patch)
tree        5cd0a018efd4447f8ddff50724aa049037eb40c5 /bitbake/lib/bb
parent      0ca70ce37aa8cec6a74ec874a7b11597b608c403 (diff)
download    poky-3e9fc8d0916f1d51dd6b748fff966d8aafd7f438.tar.gz
bitbake: toasterui: performance improvements
Improve the performance of data logging in toasterui.
We modify the data queries to:
* cache search results in memory
* insert in bulk (i.e. multiple values per insert, where possible)
On a development test rig (networked MySQL), for a no-op build,
the data recording time drops from 4:10 to 1:30 (minutes:seconds).
We also improve the logging so that toasterui errors
are easier to detect.
(Bitbake rev: d42784432f927f58730caf80546c66772e0fec89)
Signed-off-by: Alexandru DAMIAN <alexandru.damian@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
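
For readers skimming the diff below, here is a minimal sketch of the two patterns the patch leans on: an in-process dictionary in front of the ORM so repeated look-ups skip the database, and bulk_create() so many rows are written with a single multi-row INSERT. The helper names, the model_cls argument, and the cache layout are illustrative only, not the actual buildinfohelper API; Django's create(), get_or_create() and bulk_create() are the real manager calls the patch uses.

    # Illustrative sketch only (hypothetical helpers, not the buildinfohelper code).
    from django.db import models

    _cache = {}   # per-process cache: (model name, lookup kwargs) -> saved instance

    def cached_get_or_create(model_cls, **kwargs):
        """Create-or-return backed by an in-memory dict: the first call for a
        given key hits the database, later identical calls do not. Assumes no
        other writer creates these rows, mirroring the patch's assumption."""
        assert issubclass(model_cls, models.Model)
        key = (model_cls.__name__,) + tuple(sorted(kwargs.items()))
        created = False
        if key not in _cache:
            _cache[key] = model_cls.objects.create(**kwargs)
            created = True
        return _cache[key], created

    def bulk_insert(model_cls, rows):
        """Build unsaved instances in memory, then write them with one
        bulk_create() call (one multi-row INSERT) instead of one INSERT per row."""
        objs = [model_cls(**row) for row in rows]
        if objs:
            model_cls.objects.bulk_create(objs)
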
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py  | 207
-rw-r--r--  bitbake/lib/bb/ui/toasterui.py        |  10
2 files changed, 156 insertions, 61 deletions
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index b5ae9e97b2..a907a0337b 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -32,6 +32,7 @@ from toaster.orm.models import Package, Package_File, Target_Installed_Package,
 from toaster.orm.models import Task_Dependency, Package_Dependency
 from toaster.orm.models import Recipe_Dependency
 from bb.msg import BBLogFormatter as format
+from django.db import models
 
 class NotExisting(Exception):
     pass
@@ -43,8 +44,57 @@ class ORMWrapper(object):
     """
 
     def __init__(self):
+        self.layer_version_objects = []
+        self.task_objects = {}
+        self.recipe_objects = {}
         pass
 
+    @staticmethod
+    def _build_key(**kwargs):
+        key = "0"
+        for k in sorted(kwargs.keys()):
+            if isinstance(kwargs[k], models.Model):
+                key += "-%d" % kwargs[k].id
+            else:
+                key += "-%s" % str(kwargs[k])
+        return key
+
+
+    def _cached_get_or_create(self, clazz, **kwargs):
+        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
+            database through any other means.
+        """
+
+        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
+
+        key = ORMWrapper._build_key(**kwargs)
+        dictname = "objects_%s" % clazz.__name__
+        if not dictname in vars(self).keys():
+            vars(self)[dictname] = {}
+
+        created = False
+        if not key in vars(self)[dictname].keys():
+            vars(self)[dictname][key] = clazz.objects.create(**kwargs)
+            created = True
+
+        return (vars(self)[dictname][key], created)
+
+
+    def _cached_get(self, clazz, **kwargs):
+        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
+        """
+        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
+
+        key = ORMWrapper._build_key(**kwargs)
+        dictname = "objects_%s" % clazz.__name__
+
+        if not dictname in vars(self).keys():
+            vars(self)[dictname] = {}
+
+        if not key in vars(self)[dictname].keys():
+            vars(self)[dictname][key] = clazz.objects.get(**kwargs)
+
+        return vars(self)[dictname][key]
 
     def create_build_object(self, build_info, brbe):
         assert 'machine' in build_info
@@ -87,7 +137,7 @@ class ORMWrapper(object):
             tgt_object = Target.objects.create( build = target_info['build'],
                                     target = tgt_name,
                                     is_image = False,
-                                    );
+                                    )
             targets.append(tgt_object)
         return targets
 
@@ -117,41 +167,47 @@ class ORMWrapper(object):
         assert 'recipe' in task_information
         assert 'task_name' in task_information
 
-        task_object, created = Task.objects.get_or_create(
-                        build=task_information['build'],
-                        recipe=task_information['recipe'],
-                        task_name=task_information['task_name'],
-                        )
-
-        if must_exist and created:
+        # we use must_exist info for database look-up optimization
+        task_object, created = self._cached_get_or_create(Task,
+                        build=task_information['build'],
+                        recipe=task_information['recipe'],
+                        task_name=task_information['task_name']
+                        )
+        if created and must_exist:
             task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
-            task_object.delete()
             raise NotExisting("Task object created when expected to exist", task_information)
 
+        object_changed = False
         for v in vars(task_object):
             if v in task_information.keys():
-                vars(task_object)[v] = task_information[v]
+                if vars(task_object)[v] != task_information[v]:
+                    vars(task_object)[v] = task_information[v]
+                    object_changed = True
 
-        # update setscene-related information
-        if 1 == Task.objects.related_setscene(task_object).count():
-            if task_object.outcome == Task.OUTCOME_COVERED:
-                task_object.outcome = Task.OUTCOME_CACHED
+        # update setscene-related information if the task was just created
+        if created and task_object.outcome == Task.OUTCOME_COVERED and 1 == Task.objects.related_setscene(task_object).count():
+            task_object.outcome = Task.OUTCOME_CACHED
+            object_changed = True
 
             outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                     recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
             if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                 task_object.sstate_result = Task.SSTATE_RESTORED
+                object_changed = True
             elif outcome_task_setscene == Task.OUTCOME_FAILED:
                 task_object.sstate_result = Task.SSTATE_FAILED
+                object_changed = True
 
         # mark down duration if we have a start time and a current time
         if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
             duration = task_information['end_time'] - task_information['start_time']
             task_object.elapsed_time = duration
+            object_changed = True
             del task_information['start_time']
             del task_information['end_time']
 
-        task_object.save()
+        if object_changed:
+            task_object.save()
         return task_object
 
 
@@ -159,20 +215,19 @@ class ORMWrapper(object):
         assert 'layer_version' in recipe_information
         assert 'file_path' in recipe_information
 
-
-        recipe_object, created = Recipe.objects.get_or_create(
-                        layer_version=recipe_information['layer_version'],
-                        file_path=recipe_information['file_path'])
-
-        if must_exist and created:
-            recipe_object.delete()
+        recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
+                        file_path=recipe_information['file_path'])
+        if created and must_exist:
             raise NotExisting("Recipe object created when expected to exist", recipe_information)
 
+        object_changed = False
         for v in vars(recipe_object):
             if v in recipe_information.keys():
+                object_changed = True
                 vars(recipe_object)[v] = recipe_information[v]
 
-        recipe_object.save()
+        if object_changed:
+            recipe_object.save()
 
         return recipe_object
 
@@ -191,6 +246,8 @@ class ORMWrapper(object):
                                     priority = layer_version_information['priority']
                                     )
 
+        self.layer_version_objects.append(layer_version_object)
+
         return layer_version_object
 
     def get_update_layer_object(self, layer_information):
@@ -235,7 +292,7 @@ class ORMWrapper(object):
             parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
             if len(parent_path) == 0:
                 parent_path = "/"
-            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
             tf_obj = Target_File.objects.create(
                         target = target_obj,
                         path = path,
@@ -269,7 +326,7 @@ class ORMWrapper(object):
                         permission = permission,
                         owner = user,
                         group = group)
-            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
             tf_obj.directory = parent_obj
             tf_obj.save()
 
@@ -324,7 +381,7 @@ class ORMWrapper(object):
                 searchname = pkgpnmap[p]['OPKGN']
 
             packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
-            if True: # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
+            if created or package[p]['object'].size == -1: # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                 # fill in everything we can from the runtime-reverse package data
                 try:
                     packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -338,11 +395,14 @@ class ORMWrapper(object):
                     packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
 
                     # no files recorded for this package, so save files info
+                    packagefile_objects = []
                     for targetpath in pkgpnmap[p]['FILES_INFO']:
                         targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
-                        Package_File.objects.create( package = packagedict[p]['object'],
+                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                             path = targetpath,
-                            size = targetfilesize)
+                            size = targetfilesize))
+                    if len(packagefile_objects):
+                        Package_File.objects.bulk_create(packagefile_objects)
                 except KeyError as e:
                     errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
 
@@ -352,6 +412,7 @@ class ORMWrapper(object):
 
             Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
 
+        packagedeps_objs = []
         for p in packagedict:
             for (px,deptype) in packagedict[p]['depends']:
                 if deptype == 'depends':
@@ -359,10 +420,13 @@ class ORMWrapper(object):
                 elif deptype == 'recommends':
                     tdeptype = Package_Dependency.TYPE_TRECOMMENDS
 
-                Package_Dependency.objects.create( package = packagedict[p]['object'],
+                packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
                                     depends_on = packagedict[px]['object'],
                                     dep_type = tdeptype,
-                                    target = target_obj);
+                                    target = target_obj))
+
+        if len(packagedeps_objs) > 0:
+            Package_Dependency.objects.bulk_create(packagedeps_objs)
 
         if (len(errormsg) > 0):
             raise Exception(errormsg)
@@ -398,7 +462,7 @@ class ORMWrapper(object):
         if 'OPKGN' in package_info.keys():
             pname = package_info['OPKGN']
 
-        bp_object, created = Package.objects.get_or_create( build = build_obj,
+        bp_object = Package.objects.create( build = build_obj,
                                        name = pname )
 
         bp_object.installed_name = package_info['PKG']
@@ -413,10 +477,13 @@ class ORMWrapper(object):
         bp_object.save()
 
         # save any attached file information
+        packagefile_objects = []
         for path in package_info['FILES_INFO']:
-            fo = Package_File.objects.create( package = bp_object,
+            packagefile_objects.append(Package_File( package = bp_object,
                                         path = path,
-                                        size = package_info['FILES_INFO'][path] )
+                                        size = package_info['FILES_INFO'][path] ))
+        if len(packagefile_objects):
+            Package_File.objects.bulk_create(packagefile_objects)
 
         def _po_byname(p):
             pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
@@ -425,39 +492,45 @@ class ORMWrapper(object):
             pkg.save()
             return pkg
 
+        packagedeps_objs = []
         # save soft dependency information
         if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
             for p in bb.utils.explode_deps(package_info['RDEPENDS']):
-                Package_Dependency.objects.get_or_create( package = bp_object,
-                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
+                packagedeps_objs.append(Package_Dependency( package = bp_object,
+                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
         if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
             for p in bb.utils.explode_deps(package_info['RPROVIDES']):
-                Package_Dependency.objects.get_or_create( package = bp_object,
-                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
+                packagedeps_objs.append(Package_Dependency( package = bp_object,
+                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
         if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
             for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
-                Package_Dependency.objects.get_or_create( package = bp_object,
-                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
+                packagedeps_objs.append(Package_Dependency( package = bp_object,
+                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
             for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
-                Package_Dependency.objects.get_or_create( package = bp_object,
-                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
+                packagedeps_objs.append(Package_Dependency( package = bp_object,
+                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
         if 'RREPLACES' in package_info and package_info['RREPLACES']:
             for p in bb.utils.explode_deps(package_info['RREPLACES']):
-                Package_Dependency.objects.get_or_create( package = bp_object,
-                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
+                packagedeps_objs.append(Package_Dependency( package = bp_object,
+                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
         if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
             for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
-                Package_Dependency.objects.get_or_create( package = bp_object,
-                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)
+                packagedeps_objs.append(Package_Dependency( package = bp_object,
+                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
+
+        if len(packagedeps_objs) > 0:
+            Package_Dependency.objects.bulk_create(packagedeps_objs)
 
         return bp_object
 
     def save_build_variables(self, build_obj, vardump):
         assert isinstance(build_obj, Build)
 
+        helptext_objects = []
+
         for k in vardump:
-            desc = vardump[k]['doc'];
+            desc = vardump[k]['doc']
             if desc is None:
                 var_words = [word for word in k.split('_')]
                 root_var = "_".join([word for word in var_words if word.isupper()])
@@ -465,25 +538,31 @@ class ORMWrapper(object):
                     desc = vardump[root_var]['doc']
             if desc is None:
                 desc = ''
-            if desc:
-                helptext_obj = HelpText.objects.create(build=build_obj,
+            if len(desc):
+                helptext_objects.append(HelpText(build=build_obj,
                                                        area=HelpText.VARIABLE,
                                                        key=k,
-                                                       text=desc)
+                                                       text=desc))
             if not bool(vardump[k]['func']):
-                value = vardump[k]['v'];
+                value = vardump[k]['v']
                 if value is None:
                     value = ''
                 variable_obj = Variable.objects.create( build = build_obj,
                     variable_name = k,
                     variable_value = value,
                     description = desc)
+
+                varhist_objects = []
                 for vh in vardump[k]['history']:
                     if not 'documentation.conf' in vh['file']:
-                        VariableHistory.objects.create( variable = variable_obj,
+                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                             file_name = vh['file'],
                                             line_number = vh['line'],
-                                            operation = vh['op'])
+                                            operation = vh['op']))
+                if len(varhist_objects):
+                    VariableHistory.objects.bulk_create(varhist_objects)
+
+        HelpText.objects.bulk_create(helptext_objects)
 
 class MockEvent: pass # sometimes we mock an event, declare it here
 
@@ -555,7 +634,7 @@ class BuildInfoHelper(object):
 
         # Heuristics: we always match recipe to the deepest layer path that
         # we can match to the recipe file path
-        for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
+        for bl in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey):
             if (path.startswith(bl.layer.local_path)):
                 return bl
 
@@ -615,6 +694,7 @@ class BuildInfoHelper(object):
 
     def store_started_build(self, event):
         assert '_pkgs' in vars(event)
+        assert 'lvs' in self.internal_state, "Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass."
         build_information = self._get_build_information()
 
         build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe)
@@ -885,20 +965,22 @@ class BuildInfoHelper(object):
 
         # save recipe dependency
         # buildtime
+        recipedeps_objects = []
         for recipe in event._depgraph['depends']:
            try:
                target = self.internal_state['recipes'][recipe]
                for dep in event._depgraph['depends'][recipe]:
                    dependency = self.internal_state['recipes'][dep]
-                   Recipe_Dependency.objects.get_or_create( recipe = target,
-                                       depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
+                   recipedeps_objects.append(Recipe_Dependency( recipe = target,
+                                       depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
            except KeyError as e:
                if e not in assume_provided and not str(e).startswith("virtual/"):
                    errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
+        Recipe_Dependency.objects.bulk_create(recipedeps_objects)
 
         # save all task information
         def _save_a_task(taskdesc):
-            spec = re.split(r'\.', taskdesc);
+            spec = re.split(r'\.', taskdesc)
             pn = ".".join(spec[0:-1])
             taskname = spec[-1]
             e = event
@@ -915,6 +997,7 @@ class BuildInfoHelper(object):
             tasks[taskdesc] = _save_a_task(taskdesc)
 
         # create dependencies between tasks
+        taskdeps_objects = []
         for taskdesc in event._depgraph['tdepends']:
             target = tasks[taskdesc]
             for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -923,7 +1006,8 @@ class BuildInfoHelper(object):
                     dep = _save_a_task(taskdep)
                 else:
                     dep = tasks[taskdep]
-                Task_Dependency.objects.get_or_create( task = target, depends_on = dep )
+                taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
+        Task_Dependency.objects.bulk_create(taskdeps_objects)
 
         if (len(errormsg) > 0):
             raise Exception(errormsg)
@@ -955,6 +1039,8 @@ class BuildInfoHelper(object):
         mockevent = MockEvent()
         mockevent.levelno = format.ERROR
         mockevent.msg = text
+        mockevent.pathname = '-- None'
+        mockevent.lineno = -1
         self.store_log_event(mockevent)
 
     def store_log_event(self, event):
@@ -980,9 +1066,10 @@ class BuildInfoHelper(object):
         if 'build' in self.internal_state and 'backlog' in self.internal_state:
             if len(self.internal_state['backlog']):
                 tempevent = self.internal_state['backlog'].pop()
-                print " Saving stored event ", tempevent
+                print "DEBUG: Saving stored event ", tempevent
                 self.store_log_event(tempevent)
             else:
+                print "ERROR: Events not saved: \n", self.internal_state['backlog']
                 del self.internal_state['backlog']
 
         log_information = {}
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index 007c6b6114..b9e8029da1 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -295,9 +295,17 @@ def main(server, eventHandler, params ):
             main.shutdown = 1
             pass
         except Exception as e:
+            # print errors to log
             logger.error(e)
             import traceback
-            traceback.print_exc()
+            exception_data = traceback.format_exc()
+
+            # save them to database, if possible; if it fails, we already logged to console.
+            try:
+                buildinfohelper.store_log_error("%s\n%s" % (str(e), exception_data))
+            except Exception:
+                pass
+
             pass
 
     if interrupted: