blob: 2d1ed51116af83417fdcb6c170d5fb4ec00eac45 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake ToasterUI Implementation
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import sys
20import bb
21import re
22import os
23
# Django reads DJANGO_SETTINGS_MODULE at import time, so this must be set
# before any of the django.* / toaster.orm imports below.
os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
25
26
27from django.utils import timezone
28
29
30def _configure_toaster():
31 """ Add toaster to sys path for importing modules
32 """
33 sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
34_configure_toaster()
35
36from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
37from toaster.orm.models import Target_Image_File, BuildArtifact
38from toaster.orm.models import Variable, VariableHistory
39from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
40from toaster.orm.models import Task_Dependency, Package_Dependency
41from toaster.orm.models import Recipe_Dependency
42
43from toaster.orm.models import Project
44from bldcontrol.models import BuildEnvironment, BuildRequest
45
46from bb.msg import BBLogFormatter as formatter
47from django.db import models
48from pprint import pformat
49import logging
50
51from django.db import transaction, connection
52
53# pylint: disable=invalid-name
54# the logger name is standard throughout BitBake
55logger = logging.getLogger("ToasterLogger")
56
57
class NotExisting(Exception):
    """Raised when an object that was expected to already exist in the
    database could not be found (or was unexpectedly created)."""
60
class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        # Layer_Version objects discovered for this build; used later for
        # matching recipe file paths back to their layer.
        self.layer_version_objects = []
        # per-model caches; the _cached_get* helpers also create additional
        # "objects_<ModelName>" dicts lazily on this instance.
        self.task_objects = {}
        self.recipe_objects = {}

    @staticmethod
    def _build_key(**kwargs):
        """Build a deterministic cache key from lookup kwargs.

        Model instances contribute their primary key, anything else its
        string form; kwargs are sorted so the key does not depend on call
        order.
        """
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key


    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.

            Returns a (object, created) tuple mirroring Django's get_or_create.
        """

        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        # one lazily-created cache dict per model class, stored as an
        # instance attribute named "objects_<ModelName>"
        dictname = "objects_%s" % clazz.__name__
        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        created = False
        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key] = clazz.objects.create(**kwargs)
            created = True

        return (vars(self)[dictname][key], created)


    def _cached_get(self, clazz, **kwargs):
        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
        """
        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__

        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        if not key in vars(self)[dictname].keys():
            # NOTE: clazz.objects.get raises DoesNotExist if no row matches;
            # that exception propagates to the caller unchanged.
            vars(self)[dictname][key] = clazz.objects.get(**kwargs)

        return vars(self)[dictname][key]

    # pylint: disable=no-self-use
    # we disable detection of no self use in functions because the methods actually work on the object
    # even if they don't touch self anywhere

    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def create_build_object(self, build_info, brbe, project_id):
        """Create (or re-use and update) the Build row for a starting build.

        build_info: dict produced by BuildInfoHelper._get_build_information().
        brbe: "buildrequest_id:buildenvironment_id" string, or None for
        builds not triggered through a BuildRequest.
        project_id: explicit project primary key, or None.
        """
        assert 'machine' in build_info
        assert 'distro' in build_info
        assert 'distro_version' in build_info
        assert 'started_on' in build_info
        assert 'cooker_log_path' in build_info
        assert 'build_name' in build_info
        assert 'bitbake_version' in build_info

        prj = None
        buildrequest = None
        if brbe is not None:    # this build was triggered by a request from a user
            logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk = br)
            prj = buildrequest.project

        elif project_id is not None:    # this build was triggered by an external system for a specific project
            # NOTE(review): prj is still None at this point, so this message
            # always logs "None"; it probably meant to log project_id.
            logger.debug(1, "buildinfohelper: project is %s" % prj)
            prj = Project.objects.get(pk = project_id)

        else:            # this build was triggered by a legacy system, or command line interactive mode
            prj = Project.objects.get_default_project()
            logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)


        if buildrequest is not None:
            # re-use the Build already attached to the request and refresh it
            build = buildrequest.build
            logger.info("Updating existing build, with %s", build_info)
            build.project = prj
            build.machine=build_info['machine']
            build.distro=build_info['distro']
            build.distro_version=build_info['distro_version']
            build.cooker_log_path=build_info['cooker_log_path']
            build.build_name=build_info['build_name']
            build.bitbake_version=build_info['bitbake_version']
            build.save()

            # drop stale targets from a previous run of this request
            Target.objects.filter(build = build).delete()

        else:
            build = Build.objects.create(
                                    project = prj,
                                    machine=build_info['machine'],
                                    distro=build_info['distro'],
                                    distro_version=build_info['distro_version'],
                                    started_on=build_info['started_on'],
                                    completed_on=build_info['started_on'],
                                    cooker_log_path=build_info['cooker_log_path'],
                                    build_name=build_info['build_name'],
                                    bitbake_version=build_info['bitbake_version'])

        logger.debug(1, "buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

    def create_target_objects(self, target_info):
        """Create one Target row per requested target name.

        target_info: dict with 'build' (Build instance) and 'targets'
        (iterable of target name strings). Returns the created objects.
        """
        assert 'build' in target_info
        assert 'targets' in target_info

        targets = []
        for tgt_name in target_info['targets']:
            tgt_object = Target.objects.create( build = target_info['build'],
                                    target = tgt_name,
                                    is_image = False,
                                    )
            targets.append(tgt_object)
        return targets

    def update_build_object(self, build, errors, warnings, taskfailures):
        """Finalize a Build row: stamp completion time and overall outcome.

        Any error or failed task marks the whole build as FAILED.
        NOTE: the warnings count is validated but does not affect the outcome.
        """
        assert isinstance(build,Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        # record where the license manifest for this target was written
        target.license_manifest_path = license_manifest_path
        target.save()

    def get_update_task_object(self, task_information, must_exist = False):
        """Get-or-create a Task row and sync it with task_information.

        must_exist=True turns an unexpected creation into a NotExisting
        error (used as a database look-up sanity check).
        """
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        # copy any matching keys straight into the model's instance dict;
        # this bypasses Django field descriptors on purpose to allow a
        # cheap changed/unchanged comparison.
        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            # NOTE(review): Task.objects.get raises DoesNotExist if no
            # executed setscene task matches — confirm this cannot happen
            # when get_related_setscene() returned exactly one row.
            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        # mark down duration if we have a start time and a current time
        if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
            duration = task_information['end_time'] - task_information['start_time']
            task_object.elapsed_time = duration
            object_changed = True
            # consume the timestamps so they are not re-applied on a later call
            del task_information['start_time']
            del task_information['end_time']

        if object_changed:
            task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        """Get-or-create a Recipe row and sync it with recipe_information.

        Raises NotExisting if must_exist is set and the row had to be created.
        """
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")      # we should have layer-relative paths at all times

        recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])
        if created and must_exist:
            raise NotExisting("Recipe object created when expected to exist", recipe_information)

        # unlike get_update_task_object, this marks the object changed on
        # any key overlap, without comparing old and new values
        object_changed = False
        for v in vars(recipe_object):
            if v in recipe_information.keys():
                object_changed = True
                vars(recipe_object)[v] = recipe_information[v]

        if object_changed:
            recipe_object.save()

        return recipe_object

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        """Get-or-create the Layer_Version row tying a layer to this build,
        and remember it for later recipe-path matching."""
        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                    build = build_obj,
                                    layer = layer_obj,
                                    branch = layer_version_information['branch'],
                                    commit = layer_version_information['commit'],
                                    priority = layer_version_information['priority'],
                                    local_path = layer_version_information['local_path'],
                                    )

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
        """Resolve the Layer row for discovered layer information.

        In interactive mode (brbe is None) this is a plain get_or_create.
        In managed mode the layer is matched against the BuildRequest's
        checked-out layers; raises NotExisting when no match is found.
        """
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        if brbe is None:
            layer_object, _ = Layer.objects.get_or_create(
                                name=layer_information['name'],
                                layer_index_url=layer_information['layer_index_url'])
            return layer_object
        else:
            # we are under managed mode; we must match the layer used in the Project Layer
            br_id, be_id = brbe.split(":")

            # find layer by checkout path;
            from bldcontrol import bbcontroller
            bc = bbcontroller.getBuildEnvironmentController(pk = be_id)

            # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
            # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure

            # note that this is different
            buildrequest = BuildRequest.objects.get(pk = br_id)
            for brl in buildrequest.brlayer_set.all():
                localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
                # we get a relative path, unless running in HEAD mode where the path is absolute
                if not localdirname.startswith("/"):
                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
                #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
                if localdirname.startswith(layer_information['local_path']):
                    # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
                    #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
                    for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
                        if pl.layercommit.layer.vcs_url == brl.giturl :
                            layer = pl.layercommit.layer
                            layer.save()
                            return layer

            raise NotExisting("Unidentified layer %s" % pformat(layer_information))


    def save_target_file_information(self, build_obj, target_obj, filedata):
        """Store the target image's filesystem listing as Target_File rows.

        filedata: dict with 'dirs', 'files' and 'syms' lists; each entry is
        an ls-style record where index 0 holds type+permissions, 1-3 hold
        owner/group/size and 4 holds the (dot-prefixed) path.
        """
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # we insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            if len(path) == 0:
                # we create the root directory as a special case
                path = "/"
                tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        )
                # the root directory is its own parent
                tf_obj.directory = tf_obj
                tf_obj.save()
                continue
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            # parent directories were inserted first thanks to the depth sort
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            # the first character of the mode string encodes the inode type
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                # resolve ".." components by popping the previous element
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)


    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes):
        """Store installed-package data and inter-package dependencies for a
        target, creating Package, Package_File, Target_Installed_Package and
        Package_Dependency rows."""
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            searchname = p
            # OPKGN holds the original package name when it was renamed
            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                            path = targetpath,
                            size = targetfilesize))
                    if len(packagefile_objects):
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    # collect the problem and keep going; reported once at the end
                    errormsg += "  stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                # NOTE(review): if deptype is neither 'depends' nor
                # 'recommends', tdeptype keeps its value from the previous
                # iteration (or raises UnboundLocalError on the first one).
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
                                        depends_on = packagedict[px]['object'],
                                        dep_type = tdeptype,
                                        target = target_obj))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        if len(errormsg) > 0:
            logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        # record one produced image file for this target
        Target_Image_File.objects.create( target = target_obj,
                            file_name = file_name,
                            file_size = file_size)

    def save_artifact_information(self, build_obj, file_name, file_size):
        """Record a build artifact, skipping files already registered as
        image files or artifacts (possibly by other builds)."""
        # we skip the image files from other builds
        if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
            return

        # do not update artifacts found in other builds
        if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
            return

        BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)

    def create_logmessage(self, log_information):
        """Create a LogMessage row from log_information.

        NOTE(review): returns log_object.save(), which is None in Django —
        callers must not rely on the return value.
        """
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        # copy any further matching keys directly into the instance dict
        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes):
        """Store a built package (from PackageInfo events): the Package row,
        its file list and all R*-style runtime dependency edges."""
        assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        # OPKGN holds the original package name when it was renamed
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                       name = pname )

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipes[package_info['PN']]
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            # resolve a dependency target by name, creating a placeholder
            # Package (size -1) if it has not been seen yet
            pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
        """Store the bitbake variable dump for a build as Variable,
        VariableHistory and HelpText rows.

        vardump maps variable name -> dict with 'v' (value), 'doc'
        (documentation), 'func' (is-a-function flag) and 'history'.
        """
        assert isinstance(build_obj, Build)

        helptext_objects = []
        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                # fall back to the documentation of the root variable,
                # e.g. FOO_append_bar inherits the doc of FOO
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                helptext_objects.append(HelpText(build=build_obj,
                    area=HelpText.VARIABLE,
                    key=k,
                    text=desc))
            # shell/python functions are not stored as variables
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    # history entries originating from documentation.conf
                    # carry no useful change information
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)

        HelpText.objects.bulk_create(helptext_objects)
659
660
class MockEvent(object):
    """ This object is used to create event, for which normal event-processing methods can
        be used, out of data that is not coming via an actual event
    """
    def __init__(self):
        # mirror the attributes carried by real bitbake log events,
        # all unset until the caller fills them in
        for attr in ("msg", "levelno", "taskname",
                     "taskhash", "pathname", "lineno"):
            setattr(self, attr, None)
672
673
674class BuildInfoHelper(object):
675 """ This class gathers the build information from the server and sends it
676 towards the ORM wrapper for storing in the database
677 It is instantiated once per build
678 Keeps in memory all data that needs matching before writing it to the database
679 """
680
681 # pylint: disable=protected-access
682 # the code will look into the protected variables of the event; no easy way around this
683 # pylint: disable=bad-continuation
684 # we do not follow the python conventions for continuation indentation due to long lines here
685
    def __init__(self, server, has_build_history = False):
        """Set up per-build state and query basic settings from the server.

        server: bitbake server proxy exposing runCommand().
        has_build_history: whether buildhistory data will be available.
        """
        # scratch state shared across event handlers for the current build
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
        # "buildrequest_id:buildenvironment_id", or None outside managed mode
        self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
        self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
701
702
703 ###################
704 ## methods to convert event/external info into objects that the ORM layer uses
705
706
707 def _get_build_information(self):
708 build_info = {}
709 # Generate an identifier for each new build
710
711 build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
712 build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
713 build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
714 build_info['started_on'] = timezone.now()
715 build_info['completed_on'] = timezone.now()
716 build_info['cooker_log_path'] = self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
717 build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
718 build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
719
720 return build_info
721
722 def _get_task_information(self, event, recipe):
723 assert 'taskname' in vars(event)
724
725 task_information = {}
726 task_information['build'] = self.internal_state['build']
727 task_information['outcome'] = Task.OUTCOME_NA
728 task_information['recipe'] = recipe
729 task_information['task_name'] = event.taskname
730 try:
731 # some tasks don't come with a hash. and that's ok
732 task_information['sstate_checksum'] = event.taskhash
733 except AttributeError:
734 pass
735 return task_information
736
    def _get_layer_version_for_path(self, path):
        """Map an absolute recipe file path to the Layer_Version it lives in.

        Falls back to a placeholder "__FIXME__unidentified_layer" version
        when no known layer contains the path.
        """
        assert path.startswith("/")
        assert 'build' in self.internal_state

        if self.brbe is None:
            # interactive (non-managed) mode: match against discovered layers
            def _slkey_interactive(layer_version):
                assert isinstance(layer_version, Layer_Version)
                return len(layer_version.local_path)

            # Heuristics: we always match recipe to the deepest layer path in the discovered layers
            for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
                # we can match to the recipe file path
                if path.startswith(lvo.local_path):
                    return lvo

        else:
            # managed mode: match against the BuildRequest's layer checkouts
            br_id, be_id = self.brbe.split(":")
            from bldcontrol.bbcontroller import getBuildEnvironmentController
            bc = getBuildEnvironmentController(pk = be_id)

            def _slkey_managed(layer_version):
                # sort key: length of the layer's checkout path (deepest first)
                return len(bc.getGitCloneDirectory(layer_version.giturl, layer_version.commit) + layer_version.dirpath)

            # Heuristics: we match the path to where the layers have been checked out
            for brl in sorted(BuildRequest.objects.get(pk = br_id).brlayer_set.all(), reverse = True, key = _slkey_managed):
                localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
                # we get a relative path, unless running in HEAD mode where the path is absolute
                if not localdirname.startswith("/"):
                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
                if path.startswith(localdirname):
                    #logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
                    # we matched the BRLayer, but we need the layer_version that generated this br
                    for lvo in self.orm_wrapper.layer_version_objects:
                        if brl.name == lvo.layer.name:
                            return lvo

        #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        #mockup the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj
784
785 def _get_recipe_information_from_taskfile(self, taskfile):
786 localfilepath = taskfile.split(":")[-1]
787 filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
788 layer_version_obj = self._get_layer_version_for_path(localfilepath)
789
790
791
792 recipe_info = {}
793 recipe_info['layer_version'] = layer_version_obj
794 recipe_info['file_path'] = localfilepath
795 recipe_info['pathflags'] = filepath_flags
796
797 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
798 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
799 else:
800 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
801
802 return recipe_info
803
804 def _get_path_information(self, task_object):
805 assert isinstance(task_object, Task)
806 build_stats_format = "{tmpdir}/buildstats/{target}-{machine}/{buildname}/{package}/"
807 build_stats_path = []
808
809 for t in self.internal_state['targets']:
810 target = t.target
811 machine = self.internal_state['build'].machine
812 buildname = self.internal_state['build'].build_name
813 pe, pv = task_object.recipe.version.split(":",1)
814 if len(pe) > 0:
815 package = task_object.recipe.name + "-" + pe + "_" + pv
816 else:
817 package = task_object.recipe.name + "-" + pv
818
819 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, target=target,
820 machine=machine, buildname=buildname,
821 package=package))
822
823 return build_stats_path
824
825
826 ################################
827 ## external available methods to store information
828 @staticmethod
829 def _get_data_from_event(event):
830 evdata = None
831 if '_localdata' in vars(event):
832 evdata = event._localdata
833 elif 'data' in vars(event):
834 evdata = event.data
835 else:
836 raise Exception("Event with neither _localdata or data properties")
837 return evdata
838
839 def store_layer_info(self, event):
840 layerinfos = BuildInfoHelper._get_data_from_event(event)
841 self.internal_state['lvs'] = {}
842 for layer in layerinfos:
843 try:
844 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
845 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
846 except NotExisting as nee:
847 logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee)
848
849
    def store_started_build(self, event):
        """Create the Build and Target records for a newly-started build.

        Saves the layer-version info collected by store_layer_info(), creates
        Target rows for the requested packages, snapshots all bitbake
        variables (with file paths made relative to the build dir or layer
        checkouts) and returns self.brbe.
        """
        assert '_pkgs' in vars(event)
        build_information = self._get_build_information()

        build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)

        self.internal_state['build'] = build_obj

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            # layer-version info is only relevant for this one build
            del self.internal_state['lvs']

        # create target information
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = build_obj

        self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        # longest checkout paths first, so the most specific prefix wins
        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                vh['file']=abs_file_name[len(pp + "/"):]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe
903
904
905 def update_target_image_file(self, event):
906 evdata = BuildInfoHelper._get_data_from_event(event)
907
908 for t in self.internal_state['targets']:
909 if t.is_image == True:
910 output_files = list(evdata.viewkeys())
911 for output in output_files:
912 if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
913 self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
914
915 def update_artifact_image_file(self, event):
916 evdata = BuildInfoHelper._get_data_from_event(event)
917 for artifact_path in evdata.keys():
918 self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
919
920 def update_build_information(self, event, errors, warnings, taskfailures):
921 if 'build' in self.internal_state:
922 self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
923
924
925 def store_license_manifest_path(self, event):
926 deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
927 image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
928 path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
929 for target in self.internal_state['targets']:
930 if target.target in image_name:
931 self.orm_wrapper.update_target_set_license_manifest(target, path)
932
933
    def store_started_task(self, event):
        """Create a Task record for a task that just started (or was skipped).

        Determines the outcome up-front for skipped tasks (covered/prebuilt)
        and remembers it in internal_state['taskdata'], keyed by
        "<taskfile>:<taskname>", so update_and_store_task() can complete the
        record when the matching completion event arrives.
        """
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            # the two reasons are mutually exclusive strings, so these two
            # plain ifs can never both fire
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            # noexec tasks are recorded as not executed with an EMPTY outcome
            if 'noexec' in vars(event) and event.noexec == True:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        # remember the computed outcome for the matching completion event
        self.internal_state['taskdata'][identifier] = {
                        'outcome': task_information['outcome'],
                    }
972
973
    def store_tasks_stats(self, event):
        """Store per-task cpu/disk/time statistics from a buildstats event.

        The payload is a list of (taskfile, taskname, taskstats, recipename)
        tuples; each must correspond to an already-created Recipe row, and
        the matching Task object is updated in place (must exist).
        """
        for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
            localfilepath = taskfile.split(":")[-1]
            assert localfilepath.startswith("/")

            recipe_information = self._get_recipe_information_from_taskfile(taskfile)
            try:
                # NOTE(review): _get_recipe_information_from_taskfile() already
                # strips the layer prefix from 'file_path' (raising otherwise),
                # so this startswith branch looks unreachable — confirm before
                # removing
                if recipe_information['file_path'].startswith(recipe_information['layer_version'].local_path):
                    recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].local_path):].lstrip("/")

                recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
                                                   file_path__endswith = recipe_information['file_path'],
                                                   name = recipename)
            except Recipe.DoesNotExist:
                logger.error("Could not find recipe for recipe_information %s name %s" , pformat(recipe_information), recipename)
                raise

            task_information = {}
            task_information['build'] = self.internal_state['build']
            task_information['recipe'] = recipe_object
            task_information['task_name'] = taskname
            task_information['cpu_usage'] = taskstats['cpu_usage']
            task_information['disk_io'] = taskstats['disk_io']
            # elapsed_time is only present for tasks that actually executed
            if 'elapsed_time' in taskstats:
                task_information['elapsed_time'] = taskstats['elapsed_time']
            self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1000
    def update_and_store_task(self, event):
        """Update a previously-started Task with completion data and save it.

        Pairs TaskStarted/TaskSucceeded/TaskFailed and runqueue completion
        events with the entry created by store_started_task() via the
        "<taskfile>:<taskname>" identifier; fills in timing, log file,
        message, script type and final outcome, then persists the Task
        (which must already exist).
        """
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event,recipe)

        # the first timed event for an identifier marks the start; the
        # second one marks the end
        if 'time' in vars(event):
            if not 'start_time' in self.internal_state['taskdata'][identifier]:
                self.internal_state['taskdata'][identifier]['start_time'] = event.time
            else:
                task_information['end_time'] = event.time
                task_information['start_time'] = self.internal_state['taskdata'][identifier]['start_time']

        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        # completion events finalize the outcome and release the cached entry
        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

            if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
                task_information['outcome'] = Task.OUTCOME_FAILED
                del self.internal_state['taskdata'][identifier]

        if not connection.features.autocommits_when_autocommit_is_off:
            # we force a sync point here, to get the progress bar to show
            if self.autocommit_step % 3 == 0:
                transaction.set_autocommit(True)
                transaction.set_autocommit(False)
            self.autocommit_step += 1

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1060
1061
1062 def store_missed_state_tasks(self, event):
1063 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1064
1065 # identifier = fn + taskname + "_setscene"
1066 recipe_information = self._get_recipe_information_from_taskfile(fn)
1067 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1068 mevent = MockEvent()
1069 mevent.taskname = taskname
1070 mevent.taskhash = taskhash
1071 task_information = self._get_task_information(mevent,recipe)
1072
1073 task_information['start_time'] = timezone.now()
1074 task_information['outcome'] = Task.OUTCOME_NA
1075 task_information['sstate_checksum'] = taskhash
1076 task_information['sstate_result'] = Task.SSTATE_MISS
1077 task_information['path_to_sstate_obj'] = sstatefile
1078
1079 self.orm_wrapper.get_update_task_object(task_information)
1080
1081 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1082
1083 # identifier = fn + taskname + "_setscene"
1084 recipe_information = self._get_recipe_information_from_taskfile(fn)
1085 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1086 mevent = MockEvent()
1087 mevent.taskname = taskname
1088 mevent.taskhash = taskhash
1089 task_information = self._get_task_information(mevent,recipe)
1090
1091 task_information['path_to_sstate_obj'] = sstatefile
1092
1093 self.orm_wrapper.get_update_task_object(task_information)
1094
1095
1096 def store_target_package_data(self, event):
1097 # for all image targets
1098 for target in self.internal_state['targets']:
1099 if target.is_image:
1100 try:
1101 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
1102 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
1103 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
1104 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
1105 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1106 except KeyError:
1107 # we must have not got the data for this image, nothing to save
1108 pass
1109
1110
1111
    def store_dependency_information(self, event):
        """Store the dependency graph: layer priorities, recipes, recipe
        dependencies and task dependencies.

        Processes the DepTreeGenerated event's _depgraph payload: updates
        layer-version priorities, creates/updates Recipe rows (marking image
        recipes and their targets), then bulk-creates Recipe_Dependency and
        Task_Dependency rows.  Unresolvable recipe names are collected into
        errormsg and logged once at the end.
        """
        assert '_depgraph' in vars(event)
        assert 'layer-priorities' in event._depgraph
        assert 'pn' in event._depgraph
        assert 'tdepends' in event._depgraph

        errormsg = ""

        # save layer version priorities
        if 'layer-priorities' in event._depgraph.keys():
            for lv in event._depgraph['layer-priorities']:
                (_, path, _, priority) = lv
                layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
                assert layer_version_obj is not None
                layer_version_obj.priority = priority
                layer_version_obj.save()

        # save recipe information
        self.internal_state['recipes'] = {}
        for pn in event._depgraph['pn']:

            # filename may carry "virtual:..." prefixes; last component is
            # the on-disk path, the rest become pathflags
            file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
            pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
            layer_version_obj = self._get_layer_version_for_path(file_name)

            assert layer_version_obj is not None

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['layer_version'] = layer_version_obj

            if 'version' in event._depgraph['pn'][pn]:
                recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

            if 'summary' in event._depgraph['pn'][pn]:
                recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

            if 'license' in event._depgraph['pn'][pn]:
                recipe_info['license'] = event._depgraph['pn'][pn]['license']

            if 'description' in event._depgraph['pn'][pn]:
                recipe_info['description'] = event._depgraph['pn'][pn]['description']

            if 'section' in event._depgraph['pn'][pn]:
                recipe_info['section'] = event._depgraph['pn'][pn]['section']

            if 'homepage' in event._depgraph['pn'][pn]:
                recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

            if 'bugtracker' in event._depgraph['pn'][pn]:
                recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

            recipe_info['file_path'] = file_name
            recipe_info['pathflags'] = pathflags

            # store the recipe path relative to its layer checkout
            if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
                recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
            else:
                raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            recipe.is_image = False
            # a recipe inheriting image.bbclass is an image; mark matching
            # targets as images too
            if 'inherits' in event._depgraph['pn'][pn].keys():
                for cls in event._depgraph['pn'][pn]['inherits']:
                    if cls.endswith('/image.bbclass'):
                        recipe.is_image = True
                        break
            if recipe.is_image:
                for t in self.internal_state['targets']:
                    if pn == t.target:
                        t.is_image = True
                        t.save()
            self.internal_state['recipes'][pn] = recipe

        # we'll not get recipes for key w/ values listed in ASSUME_PROVIDED

        assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

        # save recipe dependency
        # buildtime
        recipedeps_objects = []
        for recipe in event._depgraph['depends']:
            try:
                target = self.internal_state['recipes'][recipe]
                for dep in event._depgraph['depends'][recipe]:
                    dependency = self.internal_state['recipes'][dep]
                    recipedeps_objects.append(Recipe_Dependency( recipe = target,
                            depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
            except KeyError as e:
                # NOTE(review): `e` here is the KeyError exception object, so
                # `e not in assume_provided` (a list of strings) is always
                # True, and str(e) includes quotes so the "virtual/" prefix
                # test can never match either — confirm intended behavior
                if e not in assume_provided and not str(e).startswith("virtual/"):
                    errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
        Recipe_Dependency.objects.bulk_create(recipedeps_objects)

        # save all task information
        def _save_a_task(taskdesc):
            # taskdesc is "<pn>.<taskname>"; pn itself may contain dots
            spec = re.split(r'\.', taskdesc)
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            e = event
            e.taskname = pn
            recipe = self.internal_state['recipes'][pn]
            task_info = self._get_task_information(e, recipe)
            task_info['task_name'] = taskname
            task_obj = self.orm_wrapper.get_update_task_object(task_info)
            return task_obj

        # create tasks
        tasks = {}
        for taskdesc in event._depgraph['tdepends']:
            tasks[taskdesc] = _save_a_task(taskdesc)

        # create dependencies between tasks
        taskdeps_objects = []
        for taskdesc in event._depgraph['tdepends']:
            target = tasks[taskdesc]
            for taskdep in event._depgraph['tdepends'][taskdesc]:
                if taskdep not in tasks:
                    # Fetch tasks info is not collected previously
                    dep = _save_a_task(taskdep)
                else:
                    dep = tasks[taskdep]
                taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
        Task_Dependency.objects.bulk_create(taskdeps_objects)

        if len(errormsg) > 0:
            logger.warn("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
1238
1239
1240 def store_build_package_information(self, event):
1241 package_info = BuildInfoHelper._get_data_from_event(event)
1242 self.orm_wrapper.save_build_package_information(self.internal_state['build'],
1243 package_info,
1244 self.internal_state['recipes'],
1245 )
1246
1247 def _store_build_done(self, errorcode):
1248 logger.info("Build exited with errorcode %d", errorcode)
1249 br_id, be_id = self.brbe.split(":")
1250 be = BuildEnvironment.objects.get(pk = be_id)
1251 be.lock = BuildEnvironment.LOCK_LOCK
1252 be.save()
1253 br = BuildRequest.objects.get(pk = br_id)
1254 if errorcode == 0:
1255 # request archival of the project artifacts
1256 br.state = BuildRequest.REQ_ARCHIVE
1257 else:
1258 br.state = BuildRequest.REQ_FAILED
1259 br.save()
1260
1261
1262 def store_log_error(self, text):
1263 mockevent = MockEvent()
1264 mockevent.levelno = formatter.ERROR
1265 mockevent.msg = text
1266 mockevent.pathname = '-- None'
1267 mockevent.lineno = LogMessage.ERROR
1268 self.store_log_event(mockevent)
1269
1270 def store_log_exception(self, text, backtrace = ""):
1271 mockevent = MockEvent()
1272 mockevent.levelno = -1
1273 mockevent.msg = text
1274 mockevent.pathname = backtrace
1275 mockevent.lineno = -1
1276 self.store_log_event(mockevent)
1277
1278
    def store_log_event(self, event):
        """Save a log event as a LogMessage row for the current build.

        Events below WARNING are ignored.  While no build exists the event is
        queued on an internal backlog (unless running under Toaster control,
        in which case the build is looked up from the build request); the
        backlog is drained one entry per call once a build is available.
        """
        if event.levelno < formatter.WARNING:
            return

        # format the message once, before any queueing
        if 'args' in vars(event):
            event.msg = event.msg % event.args

        if not 'build' in self.internal_state:
            if self.brbe is None:
                # no build and no Toaster control yet: queue for later
                if not 'backlog' in self.internal_state:
                    self.internal_state['backlog'] = []
                self.internal_state['backlog'].append(event)
                return
            else:   # we're under Toaster control, the build is already created
                br, _ = self.brbe.split(":")
                buildrequest = BuildRequest.objects.get(pk = br)
                self.internal_state['build'] = buildrequest.build

        if 'build' in self.internal_state and 'backlog' in self.internal_state:
            # if we have a backlog of events, do our best to save them here
            if len(self.internal_state['backlog']):
                tempevent = self.internal_state['backlog'].pop()
                logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
                # recursive call drains one backlog entry per invocation
                self.store_log_event(tempevent)
            else:
                logger.info("buildinfohelper: All events saved")
                del self.internal_state['backlog']

        log_information = {}
        log_information['build'] = self.internal_state['build']
        if event.levelno == formatter.ERROR:
            log_information['level'] = LogMessage.ERROR
        elif event.levelno == formatter.WARNING:
            log_information['level'] = LogMessage.WARNING
        elif event.levelno == -2: # toaster self-logging
            log_information['level'] = -2
        else:
            log_information['level'] = LogMessage.INFO

        log_information['message'] = event.msg
        log_information['pathname'] = event.pathname
        log_information['lineno'] = event.lineno
        logger.info("Logging error 2: %s", log_information)
        self.orm_wrapper.create_logmessage(log_information)
1323
    def close(self, errorcode):
        """Finish the build session.

        Finalizes the build request/environment state (when under Toaster
        control), flushes any backlogged log events, and restores autocommit
        on databases that need it.
        """
        if self.brbe is not None:
            self._store_build_done(errorcode)

        if 'backlog' in self.internal_state:
            if 'build' in self.internal_state:
                # we save missed events in the database for the current build
                # (store_log_event() recursively drains the rest of the backlog)
                tempevent = self.internal_state['backlog'].pop()
                self.store_log_event(tempevent)
            else:
                # we have no build, and we still have events; something amazingly wrong happened
                for event in self.internal_state['backlog']:
                    logger.error("UNSAVED log: %s", event.msg)

        if not connection.features.autocommits_when_autocommit_is_off:
            # leave the connection in autocommit mode for whoever runs next
            transaction.set_autocommit(True)