diff options
author | Tudor Florea <tudor.florea@enea.com> | 2015-10-09 22:59:03 +0200 |
---|---|---|
committer | Tudor Florea <tudor.florea@enea.com> | 2015-10-09 22:59:03 +0200 |
commit | 972dcfcdbfe75dcfeb777150c136576cf1a71e99 (patch) | |
tree | 97a61cd7e293d7ae9d56ef7ed0f81253365bb026 /bitbake/lib/bb/ui/buildinfohelper.py | |
download | poky-972dcfcdbfe75dcfeb777150c136576cf1a71e99.tar.gz |
initial commit for Enea Linux 5.0 arm
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'bitbake/lib/bb/ui/buildinfohelper.py')
-rw-r--r-- | bitbake/lib/bb/ui/buildinfohelper.py | 1023 |
1 file changed, 1023 insertions, 0 deletions
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py new file mode 100644 index 0000000000..8ca44a1a1d --- /dev/null +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
@@ -0,0 +1,1023 @@ | |||
1 | # | ||
2 | # BitBake ToasterUI Implementation | ||
3 | # | ||
4 | # Copyright (C) 2013 Intel Corporation | ||
5 | # | ||
6 | # This program is free software; you can redistribute it and/or modify | ||
7 | # it under the terms of the GNU General Public License version 2 as | ||
8 | # published by the Free Software Foundation. | ||
9 | # | ||
10 | # This program is distributed in the hope that it will be useful, | ||
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
13 | # GNU General Public License for more details. | ||
14 | # | ||
15 | # You should have received a copy of the GNU General Public License along | ||
16 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
17 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
18 | |||
import ast
import datetime
import os
import re
import sys

import bb
from bb.msg import BBLogFormatter as format

# DJANGO_SETTINGS_MODULE must point at the Toaster settings before any
# toaster/Django module is imported below.
os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"

import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from toaster.orm.models import Target_Image_File
from toaster.orm.models import Variable, VariableHistory
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
35 | |||
class NotExisting(Exception):
    """Raised when an ORM object that was expected to already exist in the
    database turns out to have been newly created."""
    pass
38 | |||
class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        pass


    def create_build_object(self, build_info, brbe):
        """Create and return a new Build record from the *build_info* dict.

        brbe -- optional "buildrequest:buildenvironment" primary-key pair
        string; when given, the build is linked to the project of the
        originating BuildRequest.
        """
        assert 'machine' in build_info
        assert 'distro' in build_info
        assert 'distro_version' in build_info
        assert 'started_on' in build_info
        assert 'cooker_log_path' in build_info
        assert 'build_name' in build_info
        assert 'bitbake_version' in build_info

        build = Build.objects.create(
            machine=build_info['machine'],
            distro=build_info['distro'],
            distro_version=build_info['distro_version'],
            started_on=build_info['started_on'],
            # placeholder; overwritten by update_build_object() at build end
            completed_on=build_info['started_on'],
            cooker_log_path=build_info['cooker_log_path'],
            build_name=build_info['build_name'],
            bitbake_version=build_info['bitbake_version'])

        if brbe is not None:
            # NOTE(review): BuildEnvironment is imported but unused here;
            # only the BuildRequest part of the pair is dereferenced.
            from bldcontrol.models import BuildEnvironment, BuildRequest
            br, be = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk = br)
            build.project_id = buildrequest.project_id
            build.save()

        return build

    def create_target_objects(self, target_info):
        """Create one Target record per requested target name and return the
        list of created objects.  All targets start with is_image == False."""
        assert 'build' in target_info
        assert 'targets' in target_info

        targets = []
        for tgt_name in target_info['targets']:
            tgt_object = Target.objects.create( build = target_info['build'],
                                    target = tgt_name,
                                    is_image = False,
                                    );
            targets.append(tgt_object)
        return targets

    def update_build_object(self, build, errors, warnings, taskfailures):
        """Finalize *build*: set completion time, elapsed time, error and
        warning counts, and the overall outcome (FAILED on any error or
        failed task, SUCCEEDED otherwise)."""
        assert isinstance(build,Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = datetime.datetime.now()
        build.timespent = int((build.completed_on - build.started_on).total_seconds())
        build.errors_no = errors
        build.warnings_no = warnings
        build.outcome = outcome
        build.save()

    def update_target_object(self, target, license_manifest_path):
        """Record the license manifest directory for *target*."""

        target.license_manifest_path = license_manifest_path
        target.save()

    def get_update_task_object(self, task_information, must_exist = False):
        """Get or create the Task record described by *task_information* and
        update it with any matching fields from the dict.

        When must_exist is True and the record had to be created, the
        freshly created record is deleted again and NotExisting is raised.
        """
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        task_object, created = Task.objects.get_or_create(
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name'],
                        )

        if must_exist and created:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            task_object.delete()
            raise NotExisting("Task object created when expected to exist", task_information)

        # copy matching keys straight into the instance __dict__;
        # NOTE(review): writing through vars() bypasses any property logic
        # on the model — assumed intentional here.
        for v in vars(task_object):
            if v in task_information.keys():
                vars(task_object)[v] = task_information[v]

        # update setscene-related information
        if 1 == Task.objects.related_setscene(task_object).count():
            if task_object.outcome == Task.OUTCOME_COVERED:
                task_object.outcome = Task.OUTCOME_CACHED

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED

        # mark down duration if we have a start time and a current time
        if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
            duration = task_information['end_time'] - task_information['start_time']
            task_object.elapsed_time = duration

        task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        """Get or create the Recipe record for (layer_version, file_path)
        and update it from *recipe_information*.  Raises NotExisting when
        must_exist is True but the record had to be created."""
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information


        recipe_object, created = Recipe.objects.get_or_create(
                         layer_version=recipe_information['layer_version'],
                         file_path=recipe_information['file_path'])

        if must_exist and created:
            recipe_object.delete()
            raise NotExisting("Recipe object created when expected to exist", recipe_information)

        # copy matching keys straight into the instance __dict__ (see
        # get_update_task_object for the same pattern)
        for v in vars(recipe_object):
            if v in recipe_information.keys():
                vars(recipe_object)[v] = recipe_information[v]

        recipe_object.save()

        return recipe_object

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        """Get or create the Layer_Version record binding *layer_obj* to
        *build_obj* at a specific branch/commit/priority."""
        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information

        layer_version_object, created = Layer_Version.objects.get_or_create(
                    build = build_obj,
                    layer = layer_obj,
                    branch = layer_version_information['branch'],
                    commit = layer_version_information['commit'],
                    priority = layer_version_information['priority']
                    )

        return layer_version_object

    def get_update_layer_object(self, layer_information):
        """Get or create the Layer record for (name, local_path, index URL)."""
        assert 'name' in layer_information
        assert 'local_path' in layer_information
        assert 'layer_index_url' in layer_information

        layer_object, created = Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_path=layer_information['local_path'],
                    layer_index_url=layer_information['layer_index_url'])

        return layer_object

    def save_target_file_information(self, build_obj, target_obj, filedata):
        """Store the target image's filesystem listing as Target_File rows.

        filedata has three keys — 'dirs', 'files', 'syms' — each a list of
        ls-style records: [permissions, user, group, size, path, ...]
        (symlink records additionally carry the link target at index 6).
        Directories are inserted shallowest-first so that every entry's
        parent directory row already exists when it is looked up.
        """
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # we insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            if len(path) == 0:
                # we create the root directory as a special case
                path = "/"
                tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        )
                # the root directory is its own parent
                tf_obj.directory = tf_obj
                tf_obj.save()
                continue
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            # first character of the permission string encodes the inode type
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                # resolve ".." components by popping the previous element
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Exception as e:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)


    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes):
        """Store the packages installed in *target_obj* and their runtime
        dependency edges.

        packagedict maps installed package name -> {'size', 'depends', ...};
        pkgpnmap maps package name -> pkgdata variable dict; recipes maps
        PN -> Recipe object.  KeyErrors while back-filling package metadata
        are accumulated and raised as a single Exception at the end.
        """
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            searchname = p
            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
            if created:
                # package was not build in the current build, but
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        Package_File.objects.create( package = packagedict[p]['object'],
                            path = targetpath,
                            size = targetfilesize)
                except KeyError as e:
                    errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                # NOTE(review): if deptype is neither 'depends' nor
                # 'recommends', tdeptype keeps the value from the previous
                # iteration (or is unbound on the first) — confirm upstream
                # only ever emits these two dependency types.
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                Package_Dependency.objects.create( package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj);

        if (len(errormsg) > 0):
            raise Exception(errormsg)

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        """Record one produced image file (name and size) for *target_obj*."""
        # NOTE(review): the save() is redundant after objects.create(),
        # which already persists the row.
        target_image_file = Target_Image_File.objects.create( target = target_obj,
                            file_name = file_name,
                            file_size = file_size)
        target_image_file.save()

    def create_logmessage(self, log_information):
        """Create a LogMessage row, copying any extra matching fields from
        *log_information* into the instance.  Returns None (the return
        value of Model.save())."""
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        # copy matching keys straight into the instance __dict__ (see
        # get_update_task_object for the same pattern)
        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes):
        """Create/update the Package row for a package produced by this
        build, its Package_File rows, and its R* dependency edges.

        package_info is the pkgdata dict (PKG, PN, PKGV, ...); recipes maps
        PN -> Recipe object.  Returns the Package object.
        """
        assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        # OPKGN holds the original (pre-rename) package name when set
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        bp_object, created = Package.objects.get_or_create( build = build_obj,
                                       name = pname )

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipes[package_info['PN']]
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        for path in package_info['FILES_INFO']:
            fo = Package_File.objects.create( package = bp_object,
                        path = path,
                        size = package_info['FILES_INFO'][path] )

        def _po_byname(p):
            # resolve a dependency target by name; size -1 marks packages
            # known only as dependency targets, not built in this build
            pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
        """Store the build's variable dump: a HelpText row per documented
        variable, and Variable/VariableHistory rows for non-function
        variables.

        vardump maps variable name -> {'v': value, 'doc': description,
        'func': bool, 'history': [{'file','line','op'}, ...]}.  When a
        variable has no doc of its own, the doc of its "root" variable
        (the upper-case words of its name) is used if available.
        """
        assert isinstance(build_obj, Build)

        helptext_objects = []

        for k in vardump:
            desc = vardump[k]['doc'];
            if desc is None:
                # fall back to the documentation of the root variable,
                # e.g. FOO_append-x falls back to FOO
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if desc:
                helptext_obj = HelpText.objects.create(build=build_obj,
                    area=HelpText.VARIABLE,
                    key=k,
                    text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v'];
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)
                # documentation.conf history entries are noise, skip them
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        VariableHistory.objects.create( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op'])
484 | |||
class MockEvent:
    """Bare attribute holder used where a bitbake event object is mocked up."""
    pass
486 | |||
487 | class BuildInfoHelper(object): | ||
488 | """ This class gathers the build information from the server and sends it | ||
489 | towards the ORM wrapper for storing in the database | ||
490 | It is instantiated once per build | ||
491 | Keeps in memory all data that needs matching before writing it to the database | ||
492 | """ | ||
493 | |||
494 | |||
495 | def __init__(self, server, has_build_history = False): | ||
496 | self._configure_django() | ||
497 | self.internal_state = {} | ||
498 | self.internal_state['taskdata'] = {} | ||
499 | self.task_order = 0 | ||
500 | self.server = server | ||
501 | self.orm_wrapper = ORMWrapper() | ||
502 | self.has_build_history = has_build_history | ||
503 | self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] | ||
504 | self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0] | ||
505 | |||
506 | |||
507 | def _configure_django(self): | ||
508 | # Add toaster to sys path for importing modules | ||
509 | sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster')) | ||
510 | |||
511 | ################### | ||
512 | ## methods to convert event/external info into objects that the ORM layer uses | ||
513 | |||
514 | |||
515 | def _get_build_information(self): | ||
516 | build_info = {} | ||
517 | # Generate an identifier for each new build | ||
518 | |||
519 | build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0] | ||
520 | build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0] | ||
521 | build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0] | ||
522 | build_info['started_on'] = datetime.datetime.now() | ||
523 | build_info['completed_on'] = datetime.datetime.now() | ||
524 | build_info['cooker_log_path'] = self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0] | ||
525 | build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0] | ||
526 | build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0] | ||
527 | |||
528 | return build_info | ||
529 | |||
530 | def _get_task_information(self, event, recipe): | ||
531 | assert 'taskname' in vars(event) | ||
532 | |||
533 | task_information = {} | ||
534 | task_information['build'] = self.internal_state['build'] | ||
535 | task_information['outcome'] = Task.OUTCOME_NA | ||
536 | task_information['recipe'] = recipe | ||
537 | task_information['task_name'] = event.taskname | ||
538 | try: | ||
539 | # some tasks don't come with a hash. and that's ok | ||
540 | task_information['sstate_checksum'] = event.taskhash | ||
541 | except AttributeError: | ||
542 | pass | ||
543 | return task_information | ||
544 | |||
545 | def _get_layer_version_for_path(self, path): | ||
546 | assert path.startswith("/") | ||
547 | assert 'build' in self.internal_state | ||
548 | |||
549 | def _slkey(layer_version): | ||
550 | assert isinstance(layer_version, Layer_Version) | ||
551 | return len(layer_version.layer.local_path) | ||
552 | |||
553 | # Heuristics: we always match recipe to the deepest layer path that | ||
554 | # we can match to the recipe file path | ||
555 | for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey): | ||
556 | if (path.startswith(bl.layer.local_path)): | ||
557 | return bl | ||
558 | |||
559 | #if we get here, we didn't read layers correctly; mockup the new layer | ||
560 | unknown_layer, created = Layer.objects.get_or_create(name="unknown", local_path="/", layer_index_url="") | ||
561 | unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build']) | ||
562 | |||
563 | return unknown_layer_version_obj | ||
564 | |||
565 | def _get_recipe_information_from_taskfile(self, taskfile): | ||
566 | localfilepath = taskfile.split(":")[-1] | ||
567 | layer_version_obj = self._get_layer_version_for_path(localfilepath) | ||
568 | |||
569 | recipe_info = {} | ||
570 | recipe_info['layer_version'] = layer_version_obj | ||
571 | recipe_info['file_path'] = taskfile | ||
572 | |||
573 | return recipe_info | ||
574 | |||
575 | def _get_path_information(self, task_object): | ||
576 | assert isinstance(task_object, Task) | ||
577 | build_stats_format = "{tmpdir}/buildstats/{target}-{machine}/{buildname}/{package}/" | ||
578 | build_stats_path = [] | ||
579 | |||
580 | for t in self.internal_state['targets']: | ||
581 | target = t.target | ||
582 | machine = self.internal_state['build'].machine | ||
583 | buildname = self.internal_state['build'].build_name | ||
584 | pe, pv = task_object.recipe.version.split(":",1) | ||
585 | if len(pe) > 0: | ||
586 | package = task_object.recipe.name + "-" + pe + "_" + pv | ||
587 | else: | ||
588 | package = task_object.recipe.name + "-" + pv | ||
589 | |||
590 | build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, target=target, | ||
591 | machine=machine, buildname=buildname, | ||
592 | package=package)) | ||
593 | |||
594 | return build_stats_path | ||
595 | |||
596 | def _remove_redundant(self, string): | ||
597 | ret = [] | ||
598 | for i in string.split(): | ||
599 | if i not in ret: | ||
600 | ret.append(i) | ||
601 | return " ".join(sorted(ret)) | ||
602 | |||
603 | |||
604 | ################################ | ||
605 | ## external available methods to store information | ||
606 | |||
607 | def store_layer_info(self, event): | ||
608 | assert '_localdata' in vars(event) | ||
609 | layerinfos = event._localdata | ||
610 | self.internal_state['lvs'] = {} | ||
611 | for layer in layerinfos: | ||
612 | self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version'] | ||
613 | |||
614 | |||
615 | def store_started_build(self, event): | ||
616 | assert '_pkgs' in vars(event) | ||
617 | build_information = self._get_build_information() | ||
618 | |||
619 | build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe) | ||
620 | |||
621 | self.internal_state['build'] = build_obj | ||
622 | |||
623 | # save layer version information for this build | ||
624 | if not 'lvs' in self.internal_state: | ||
625 | logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.") | ||
626 | else: | ||
627 | for layer_obj in self.internal_state['lvs']: | ||
628 | self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj]) | ||
629 | |||
630 | del self.internal_state['lvs'] | ||
631 | |||
632 | # create target information | ||
633 | target_information = {} | ||
634 | target_information['targets'] = event._pkgs | ||
635 | target_information['build'] = build_obj | ||
636 | |||
637 | self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information) | ||
638 | |||
639 | # Save build configuration | ||
640 | data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0] | ||
641 | self.orm_wrapper.save_build_variables(build_obj, []) | ||
642 | |||
643 | return self.brbe | ||
644 | |||
645 | |||
646 | |||
647 | def update_target_image_file(self, event): | ||
648 | image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0] | ||
649 | for t in self.internal_state['targets']: | ||
650 | if t.is_image == True: | ||
651 | output_files = list(event._localdata.viewkeys()) | ||
652 | for output in output_files: | ||
653 | if t.target in output and output.split('.rootfs.')[1] in image_fstypes: | ||
654 | self.orm_wrapper.save_target_image_file_information(t, output, event._localdata[output]) | ||
655 | |||
656 | def update_build_information(self, event, errors, warnings, taskfailures): | ||
657 | if 'build' in self.internal_state: | ||
658 | self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures) | ||
659 | |||
660 | |||
661 | def store_license_manifest_path(self, event): | ||
662 | deploy_dir = event._localdata['deploy_dir'] | ||
663 | image_name = event._localdata['image_name'] | ||
664 | path = deploy_dir + "/licenses/" + image_name + "/" | ||
665 | for target in self.internal_state['targets']: | ||
666 | if target.target in image_name: | ||
667 | self.orm_wrapper.update_target_object(target, path) | ||
668 | |||
669 | |||
    def store_started_task(self, event):
        """Create/refresh the Task record for a task that has just started
        or been skipped, and remember its outcome in internal_state under a
        "taskfile:taskname" identifier for later correlation with finish
        events."""
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname

        # the recipe must already be known from earlier events
        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            # "covered" = provided by another task; "existing" = prebuilt
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            # noexec tasks run no script at all
            if 'noexec' in vars(event) and event.noexec == True:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        task_obj = self.orm_wrapper.get_update_task_object(task_information)

        # only the outcome is needed later to correlate with finish events
        self.internal_state['taskdata'][identifier] = {
                        'outcome': task_information['outcome'],
                }
708 | |||
709 | |||
710 | def store_tasks_stats(self, event): | ||
711 | for (taskfile, taskname, taskstats, recipename) in event._localdata: | ||
712 | localfilepath = taskfile.split(":")[-1] | ||
713 | assert localfilepath.startswith("/") | ||
714 | |||
715 | recipe_information = self._get_recipe_information_from_taskfile(taskfile) | ||
716 | recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'], | ||
717 | file_path__endswith = recipe_information['file_path'], | ||
718 | name = recipename) | ||
719 | |||
720 | task_information = {} | ||
721 | task_information['build'] = self.internal_state['build'] | ||
722 | task_information['recipe'] = recipe_object | ||
723 | task_information['task_name'] = taskname | ||
724 | task_information['cpu_usage'] = taskstats['cpu_usage'] | ||
725 | task_information['disk_io'] = taskstats['disk_io'] | ||
726 | task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist | ||
727 | |||
    def update_and_store_task(self, event):
        """Complete a task record previously created by store_started_task().

        Resolves the cached entry in internal_state['taskdata'] for this
        event's task, merges in timing, log file, message, script type and
        final outcome, and writes the updated Task object (which must
        already exist).  On task completion/failure the cached entry is
        removed.
        """
        assert 'taskfile' in vars(event)
        # taskfile may be a multi-config "virtual:...:/path" spec
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                # TaskBase events may carry a shorter taskfile spec than the
                # key stored at start time; accept a unique suffix match
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        # everything before the final ":" is the task file path as stored
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event,recipe)

        # the first timed event for a task records its start; subsequent
        # ones record the end and carry the cached start time forward
        if 'time' in vars(event):
            if not 'start_time' in self.internal_state['taskdata'][identifier]:
                self.internal_state['taskdata'][identifier]['start_time'] = event.time
            else:
                task_information['end_time'] = event.time
                task_information['start_time'] = self.internal_state['taskdata'][identifier]['start_time']

        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        # only set a final outcome (and drop the cache entry) if none was
        # recorded at start time (e.g. not a skipped/prebuilt task)
        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

            if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
                task_information['outcome'] = Task.OUTCOME_FAILED
                del self.internal_state['taskdata'][identifier]

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist
780 | |||
781 | |||
782 | def store_missed_state_tasks(self, event): | ||
783 | for (fn, taskname, taskhash, sstatefile) in event._localdata['missed']: | ||
784 | |||
785 | identifier = fn + taskname + "_setscene" | ||
786 | recipe_information = self._get_recipe_information_from_taskfile(fn) | ||
787 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information) | ||
788 | mevent = MockEvent() | ||
789 | mevent.taskname = taskname | ||
790 | mevent.taskhash = taskhash | ||
791 | task_information = self._get_task_information(mevent,recipe) | ||
792 | |||
793 | task_information['start_time'] = datetime.datetime.now() | ||
794 | task_information['outcome'] = Task.OUTCOME_NA | ||
795 | task_information['sstate_checksum'] = taskhash | ||
796 | task_information['sstate_result'] = Task.SSTATE_MISS | ||
797 | task_information['path_to_sstate_obj'] = sstatefile | ||
798 | |||
799 | self.orm_wrapper.get_update_task_object(task_information) | ||
800 | |||
801 | for (fn, taskname, taskhash, sstatefile) in event._localdata['found']: | ||
802 | |||
803 | identifier = fn + taskname + "_setscene" | ||
804 | recipe_information = self._get_recipe_information_from_taskfile(fn) | ||
805 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information) | ||
806 | mevent = MockEvent() | ||
807 | mevent.taskname = taskname | ||
808 | mevent.taskhash = taskhash | ||
809 | task_information = self._get_task_information(mevent,recipe) | ||
810 | |||
811 | task_information['path_to_sstate_obj'] = sstatefile | ||
812 | |||
813 | self.orm_wrapper.get_update_task_object(task_information) | ||
814 | |||
815 | |||
816 | def store_target_package_data(self, event): | ||
817 | assert '_localdata' in vars(event) | ||
818 | # for all image targets | ||
819 | for target in self.internal_state['targets']: | ||
820 | if target.is_image: | ||
821 | try: | ||
822 | pkgdata = event._localdata['pkgdata'] | ||
823 | imgdata = event._localdata['imgdata'][target.target] | ||
824 | self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes']) | ||
825 | filedata = event._localdata['filedata'][target.target] | ||
826 | self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata) | ||
827 | except KeyError: | ||
828 | # we must have not got the data for this image, nothing to save | ||
829 | pass | ||
830 | |||
831 | |||
832 | |||
    def store_dependency_information(self, event):
        """Save the dependency graph carried by a DepTreeGenerated event.

        Persists, in order: layer priorities, Recipe objects (including
        image detection via inherited image.bbclass), recipe build-time
        dependencies, Task objects and task-to-task dependencies.  Raises
        Exception with the accumulated message if any recipe dependency
        could not be resolved.
        """
        assert '_depgraph' in vars(event)
        assert 'layer-priorities' in event._depgraph
        assert 'pn' in event._depgraph
        assert 'tdepends' in event._depgraph

        errormsg = ""

        # save layer version priorities
        if 'layer-priorities' in event._depgraph.keys():
            for lv in event._depgraph['layer-priorities']:
                (name, path, regexp, priority) = lv
                layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
                assert layer_version_obj is not None
                layer_version_obj.priority = priority
                layer_version_obj.save()

        # save recipe information
        self.internal_state['recipes'] = {}
        for pn in event._depgraph['pn']:

            file_name = event._depgraph['pn'][pn]['filename']
            layer_version_obj = self._get_layer_version_for_path(file_name.split(":")[-1])

            assert layer_version_obj is not None

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['layer_version'] = layer_version_obj

            # the remaining recipe fields are optional in the depgraph
            if 'version' in event._depgraph['pn'][pn]:
                recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

            if 'summary' in event._depgraph['pn'][pn]:
                recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

            if 'license' in event._depgraph['pn'][pn]:
                recipe_info['license'] = event._depgraph['pn'][pn]['license']

            if 'description' in event._depgraph['pn'][pn]:
                recipe_info['description'] = event._depgraph['pn'][pn]['description']

            if 'section' in event._depgraph['pn'][pn]:
                recipe_info['section'] = event._depgraph['pn'][pn]['section']

            if 'homepage' in event._depgraph['pn'][pn]:
                recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

            if 'bugtracker' in event._depgraph['pn'][pn]:
                recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

            recipe_info['file_path'] = file_name
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            recipe.is_image = False
            # a recipe is an image if any inherited class is image.bbclass
            if 'inherits' in event._depgraph['pn'][pn].keys():
                for cls in event._depgraph['pn'][pn]['inherits']:
                    if cls.endswith('/image.bbclass'):
                        recipe.is_image = True
                        break
            if recipe.is_image:
                for t in self.internal_state['targets']:
                    if pn == t.target:
                        t.is_image = True
                        t.save()
            self.internal_state['recipes'][pn] = recipe

        # we'll not get recipes for key w/ values listed in ASSUME_PROVIDED

        assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

        # save recipe dependency
        # buildtime
        for recipe in event._depgraph['depends']:
            try:
                target = self.internal_state['recipes'][recipe]
                for dep in event._depgraph['depends'][recipe]:
                    dependency = self.internal_state['recipes'][dep]
                    Recipe_Dependency.objects.get_or_create( recipe = target,
                                    depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
            except KeyError as e:
                # NOTE(review): 'e' is a KeyError instance, so 'e not in
                # assume_provided' compares an exception object against
                # strings and str(e) is quoted (e.g. "'foo'"), so it can
                # never start with "virtual/" — both conditions look
                # always-true; verify the intended filtering
                if e not in assume_provided and not str(e).startswith("virtual/"):
                    errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)

        # save all task information
        def _save_a_task(taskdesc):
            # taskdesc is "<pn>.<taskname>"; split on the last dot
            spec = re.split(r'\.', taskdesc);
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            # reuse the incoming event object as a carrier for taskname
            e = event
            e.taskname = pn
            recipe = self.internal_state['recipes'][pn]
            task_info = self._get_task_information(e, recipe)
            task_info['task_name'] = taskname
            task_obj = self.orm_wrapper.get_update_task_object(task_info)
            return task_obj

        # create tasks
        tasks = {}
        for taskdesc in event._depgraph['tdepends']:
            tasks[taskdesc] = _save_a_task(taskdesc)

        # create dependencies between tasks
        for taskdesc in event._depgraph['tdepends']:
            target = tasks[taskdesc]
            for taskdep in event._depgraph['tdepends'][taskdesc]:
                if taskdep not in tasks:
                    # Fetch tasks info is not collected previously
                    dep = _save_a_task(taskdep)
                else:
                    dep = tasks[taskdep]
                Task_Dependency.objects.get_or_create( task = target, depends_on = dep )

        if (len(errormsg) > 0):
            raise Exception(errormsg)
947 | |||
948 | |||
949 | def store_build_package_information(self, event): | ||
950 | assert '_localdata' in vars(event) | ||
951 | package_info = event._localdata | ||
952 | self.orm_wrapper.save_build_package_information(self.internal_state['build'], | ||
953 | package_info, | ||
954 | self.internal_state['recipes'], | ||
955 | ) | ||
956 | |||
957 | def _store_build_done(self): | ||
958 | br_id, be_id = self.brbe.split(":") | ||
959 | from bldcontrol.models import BuildEnvironment, BuildRequest | ||
960 | be = BuildEnvironment.objects.get(pk = be_id) | ||
961 | be.lock = BuildEnvironment.LOCK_LOCK | ||
962 | be.save() | ||
963 | br = BuildRequest.objects.get(pk = br_id) | ||
964 | br.state = BuildRequest.REQ_COMPLETED | ||
965 | br.build = self.internal_state['build'] | ||
966 | br.save() | ||
967 | |||
968 | |||
969 | def store_log_error(self, text): | ||
970 | mockevent = MockEvent() | ||
971 | mockevent.levelno = format.ERROR | ||
972 | mockevent.msg = text | ||
973 | self.store_log_event(mockevent) | ||
974 | |||
975 | def store_log_event(self, event): | ||
976 | if event.levelno < format.WARNING: | ||
977 | return | ||
978 | |||
979 | if 'args' in vars(event): | ||
980 | event.msg = event.msg % event.args | ||
981 | |||
982 | if not 'build' in self.internal_state: | ||
983 | if self.brbe is None: | ||
984 | if not 'backlog' in self.internal_state: | ||
985 | self.internal_state['backlog'] = [] | ||
986 | self.internal_state['backlog'].append(event) | ||
987 | else: # we're under Toaster control, post the errors to the build request | ||
988 | from bldcontrol.models import BuildRequest, BRError | ||
989 | br, be = brbe.split(":") | ||
990 | buildrequest = BuildRequest.objects.get(pk = br) | ||
991 | brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg) | ||
992 | |||
993 | return | ||
994 | |||
995 | if 'build' in self.internal_state and 'backlog' in self.internal_state: | ||
996 | if len(self.internal_state['backlog']): | ||
997 | tempevent = self.internal_state['backlog'].pop() | ||
998 | print " Saving stored event ", tempevent | ||
999 | self.store_log_event(tempevent) | ||
1000 | else: | ||
1001 | del self.internal_state['backlog'] | ||
1002 | |||
1003 | log_information = {} | ||
1004 | log_information['build'] = self.internal_state['build'] | ||
1005 | if event.levelno == format.ERROR: | ||
1006 | log_information['level'] = LogMessage.ERROR | ||
1007 | elif event.levelno == format.WARNING: | ||
1008 | log_information['level'] = LogMessage.WARNING | ||
1009 | else: | ||
1010 | log_information['level'] = LogMessage.INFO | ||
1011 | |||
1012 | log_information['message'] = event.msg | ||
1013 | log_information['pathname'] = event.pathname | ||
1014 | log_information['lineno'] = event.lineno | ||
1015 | self.orm_wrapper.create_logmessage(log_information) | ||
1016 | |||
1017 | def close(self): | ||
1018 | if self.brbe is not None: | ||
1019 | buildinfohelper._store_build_done() | ||
1020 | |||
1021 | if 'backlog' in self.internal_state: | ||
1022 | for event in self.internal_state['backlog']: | ||
1023 | logger.error("Unsaved log: %s", event.msg) | ||