1#
2# BitBake ToasterUI Implementation
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import sys
20import bb
21import re
22import os
23
24os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
25
26
27from django.utils import timezone
28
29
30def _configure_toaster():
31 """ Add toaster to sys path for importing modules
32 """
33 sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
34_configure_toaster()
35
36from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
37from toaster.orm.models import Target_Image_File, BuildArtifact
38from toaster.orm.models import Variable, VariableHistory
39from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
40from toaster.orm.models import Task_Dependency, Package_Dependency
41from toaster.orm.models import Recipe_Dependency
42
43from toaster.orm.models import Project
44from bldcontrol.models import BuildEnvironment, BuildRequest
45
46from bb.msg import BBLogFormatter as formatter
47from django.db import models
48from pprint import pformat
49import logging
50
51from django.db import transaction, connection
52
53# pylint: disable=invalid-name
54# the logger name is standard throughout BitBake
55logger = logging.getLogger("ToasterLogger")
56
57
58class NotExisting(Exception):
59 pass
60
61class ORMWrapper(object):
62 """ This class creates the dictionaries needed to store information in the database
63 following the format defined by the Django models. It is also used to save this
64 information in the database.
65 """
66
67 def __init__(self):
68 self.layer_version_objects = []
69 self.layer_version_built = []
70 self.task_objects = {}
71 self.recipe_objects = {}
72
73 @staticmethod
74 def _build_key(**kwargs):
75 key = "0"
76 for k in sorted(kwargs.keys()):
77 if isinstance(kwargs[k], models.Model):
78 key += "-%d" % kwargs[k].id
79 else:
80 key += "-%s" % str(kwargs[k])
81 return key
82
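# Illustrative example (hypothetical values): _build_key(build=<Build pk=7>, task_name="do_compile")
# walks the kwargs in sorted key order, so model instances contribute their id and anything else
# its string form, yielding the cache key "0-7-do_compile".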
83
84 def _cached_get_or_create(self, clazz, **kwargs):
85 """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
86 database through any other means.
87 """
88
89 assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
90
91 key = ORMWrapper._build_key(**kwargs)
92 dictname = "objects_%s" % clazz.__name__
93 if not dictname in vars(self).keys():
94 vars(self)[dictname] = {}
95
96 created = False
97 if not key in vars(self)[dictname].keys():
98 vars(self)[dictname][key], created = \
99 clazz.objects.get_or_create(**kwargs)
100
101 return (vars(self)[dictname][key], created)
102
103
104 def _cached_get(self, clazz, **kwargs):
105 """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
106 """
107 assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
108
109 key = ORMWrapper._build_key(**kwargs)
110 dictname = "objects_%s" % clazz.__name__
111
112 if not dictname in vars(self).keys():
113 vars(self)[dictname] = {}
114
115 if not key in vars(self)[dictname].keys():
116 vars(self)[dictname][key] = clazz.objects.get(**kwargs)
117
118 return vars(self)[dictname][key]
119
120 # pylint: disable=no-self-use
121 # we disable detection of no self use in functions because the methods actually work on the object
122 # even if they don't touch self anywhere
123
124 # pylint: disable=bad-continuation
125 # we do not follow the python conventions for continuation indentation due to long lines here
126
127 def create_build_object(self, build_info, brbe, project_id):
128 assert 'machine' in build_info
129 assert 'distro' in build_info
130 assert 'distro_version' in build_info
131 assert 'started_on' in build_info
132 assert 'cooker_log_path' in build_info
133 assert 'build_name' in build_info
134 assert 'bitbake_version' in build_info
135
136 prj = None
137 buildrequest = None
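# brbe, when set, is assumed to be a "buildrequest_id:buildenvironment_id" string
# (e.g. "17:1", hypothetical); only the buildrequest id is used for the lookup below.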
138 if brbe is not None: # this build was triggered by a request from a user
139 logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
140 br, _ = brbe.split(":")
141 buildrequest = BuildRequest.objects.get(pk = br)
142 prj = buildrequest.project
143
144 elif project_id is not None: # this build was triggered by an external system for a specific project
145 logger.debug(1, "buildinfohelper: project is %s" % prj)
146 prj = Project.objects.get(pk = project_id)
147
148 else: # this build was triggered by a legacy system, or command line interactive mode
149 prj = Project.objects.get_default_project()
150 logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
151
152
153 if buildrequest is not None:
154 build = buildrequest.build
155 logger.info("Updating existing build, with %s", build_info)
156 build.project = prj
157 build.machine=build_info['machine']
158 build.distro=build_info['distro']
159 build.distro_version=build_info['distro_version']
160 build.cooker_log_path=build_info['cooker_log_path']
161 build.build_name=build_info['build_name']
162 build.bitbake_version=build_info['bitbake_version']
163 build.save()
164
165 else:
166 build = Build.objects.create(
167 project = prj,
168 machine=build_info['machine'],
169 distro=build_info['distro'],
170 distro_version=build_info['distro_version'],
171 started_on=build_info['started_on'],
172 completed_on=build_info['started_on'],
173 cooker_log_path=build_info['cooker_log_path'],
174 build_name=build_info['build_name'],
175 bitbake_version=build_info['bitbake_version'])
176
177 logger.debug(1, "buildinfohelper: build is created %s" % build)
178
179 if buildrequest is not None:
180 buildrequest.build = build
181 buildrequest.save()
182
183 return build
184
185 @staticmethod
186 def get_or_create_targets(target_info):
187 result = []
188 for target in target_info['targets']:
189 task = ''
190 if ':' in target:
191 target, task = target.split(':', 1)
192 if task.startswith('do_'):
193 task = task[3:]
194 if task == 'build':
195 task = ''
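# Illustrative example (hypothetical target): "core-image-minimal:do_populate_sdk" is stored as
# target "core-image-minimal" with task "populate_sdk"; a bare "do_build"/"build" task collapses
# to the empty default task.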
196 obj, created = Target.objects.get_or_create(build=target_info['build'],
197 target=target)
198 if created:
199 obj.is_image = False
200 if task:
201 obj.task = task
202 obj.save()
203 result.append(obj)
204 return result
205
206 def update_build_object(self, build, errors, warnings, taskfailures):
207 assert isinstance(build,Build)
208 assert isinstance(errors, int)
209 assert isinstance(warnings, int)
210
211 outcome = Build.SUCCEEDED
212 if errors or taskfailures:
213 outcome = Build.FAILED
214
215 build.completed_on = timezone.now()
216 build.outcome = outcome
217 build.save()
218
219 def update_target_set_license_manifest(self, target, license_manifest_path):
220 target.license_manifest_path = license_manifest_path
221 target.save()
222
223 def get_update_task_object(self, task_information, must_exist = False):
224 assert 'build' in task_information
225 assert 'recipe' in task_information
226 assert 'task_name' in task_information
227
228 # we use must_exist info for database look-up optimization
229 task_object, created = self._cached_get_or_create(Task,
230 build=task_information['build'],
231 recipe=task_information['recipe'],
232 task_name=task_information['task_name']
233 )
234 if created and must_exist:
235 task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
236 raise NotExisting("Task object created when expected to exist", task_information)
237
238 object_changed = False
239 for v in vars(task_object):
240 if v in task_information.keys():
241 if vars(task_object)[v] != task_information[v]:
242 vars(task_object)[v] = task_information[v]
243 object_changed = True
244
245 # update setscene-related information if the task has a setscene
246 if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
247 task_object.outcome = Task.OUTCOME_CACHED
248 object_changed = True
249
250 outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
251 recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
252 if outcome_task_setscene == Task.OUTCOME_SUCCESS:
253 task_object.sstate_result = Task.SSTATE_RESTORED
254 object_changed = True
255 elif outcome_task_setscene == Task.OUTCOME_FAILED:
256 task_object.sstate_result = Task.SSTATE_FAILED
257 object_changed = True
258
259 # mark down duration if we have a start time and a current time
260 if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
261 duration = task_information['end_time'] - task_information['start_time']
262 task_object.elapsed_time = duration
263 object_changed = True
264 del task_information['start_time']
265 del task_information['end_time']
266
267 if object_changed:
268 task_object.save()
269 return task_object
270
271
272 def get_update_recipe_object(self, recipe_information, must_exist = False):
273 assert 'layer_version' in recipe_information
274 assert 'file_path' in recipe_information
275 assert 'pathflags' in recipe_information
276
277 assert not recipe_information['file_path'].startswith("/") # we should have layer-relative paths at all times
278
279
280 def update_recipe_obj(recipe_object):
281 object_changed = False
282 for v in vars(recipe_object):
283 if v in recipe_information.keys():
284 object_changed = True
285 vars(recipe_object)[v] = recipe_information[v]
286
287 if object_changed:
288 recipe_object.save()
289
290 recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
291 file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])
292
293 update_recipe_obj(recipe)
294
295 built_recipe = None
296 # Create a copy of the recipe for historical purposes and update it
297 for built_layer in self.layer_version_built:
298 if built_layer.layer == recipe_information['layer_version'].layer:
299 built_recipe, c = self._cached_get_or_create(Recipe,
300 layer_version=built_layer,
301 file_path=recipe_information['file_path'],
302 pathflags = recipe_information['pathflags'])
303 update_recipe_obj(built_recipe)
304 break
305
306
307 # If we're in analysis mode then we are wholly responsible for the data
308 # and therefore we return the 'real' recipe rather than the build
309 # history copy of the recipe.
310 if recipe_information['layer_version'].build is not None and \
311 recipe_information['layer_version'].build.project == \
312 Project.objects.get_default_project():
313 return recipe
314
315 return built_recipe
316
317 def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
318 if isinstance(layer_obj, Layer_Version):
319 # We already found our layer version for this build so just
320 # update it with the new build information
321 logger.debug("We found our layer from toaster")
322 layer_obj.local_path = layer_version_information['local_path']
323 layer_obj.save()
324 self.layer_version_objects.append(layer_obj)
325
326 # create a new copy of this layer version as a snapshot for
327 # historical purposes
328 layer_copy, c = Layer_Version.objects.get_or_create(build=build_obj,
329 layer=layer_obj.layer,
330 commit=layer_version_information['commit'],
331 local_path = layer_version_information['local_path'],
332 )
333 logger.info("created new historical layer version %d", layer_copy.pk)
334
335 self.layer_version_built.append(layer_copy)
336
337 return layer_obj
338
339 assert isinstance(build_obj, Build)
340 assert isinstance(layer_obj, Layer)
341 assert 'branch' in layer_version_information
342 assert 'commit' in layer_version_information
343 assert 'priority' in layer_version_information
344 assert 'local_path' in layer_version_information
345
346 # If we're doing a command line build then associate this new layer with the
347 # project to avoid it 'contaminating' toaster data
348 project = None
349 if build_obj.project == Project.objects.get_default_project():
350 project = build_obj.project
351
352 layer_version_object, _ = Layer_Version.objects.get_or_create(
353 build = build_obj,
354 layer = layer_obj,
355 branch = layer_version_information['branch'],
356 commit = layer_version_information['commit'],
357 priority = layer_version_information['priority'],
358 local_path = layer_version_information['local_path'],
359 project=project)
360
361 self.layer_version_objects.append(layer_version_object)
362
363 return layer_version_object
364
365 def get_update_layer_object(self, layer_information, brbe):
366 assert 'name' in layer_information
367 assert 'layer_index_url' in layer_information
368
369 if brbe is None:
370 layer_object, _ = Layer.objects.get_or_create(
371 name=layer_information['name'],
372 layer_index_url=layer_information['layer_index_url'])
373 return layer_object
374 else:
375 # we are under managed mode; we must match the layer used in the Project Layer
376 br_id, be_id = brbe.split(":")
377
378 # find layer by checkout path;
379 from bldcontrol import bbcontroller
380 bc = bbcontroller.getBuildEnvironmentController(pk = be_id)
381
382 # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
383 # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure
384
385 # note that this is different
386 buildrequest = BuildRequest.objects.get(pk = br_id)
387 for brl in buildrequest.brlayer_set.all():
388 localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
389 # we get a relative path, unless running in HEAD mode where the path is absolute
390 if not localdirname.startswith("/"):
391 localdirname = os.path.join(bc.be.sourcedir, localdirname)
392 #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
393 if localdirname.startswith(layer_information['local_path']):
394 # If the build request came from toaster this field
395 # should contain the information from the layer_version
396 # That created this build request.
397 if brl.layer_version:
398 return brl.layer_version
399
400 # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
401 #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
402
403 for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
404 if pl.layercommit.layer.vcs_url == brl.giturl :
405 layer = pl.layercommit.layer
406 layer.save()
407 return layer
408
409 raise NotExisting("Unidentified layer %s" % pformat(layer_information))
410
411
412 def save_target_file_information(self, build_obj, target_obj, filedata):
413 assert isinstance(build_obj, Build)
414 assert isinstance(target_obj, Target)
415 dirs = filedata['dirs']
416 files = filedata['files']
417 syms = filedata['syms']
418
419 # always create the root directory as a special case;
420 # note that this is never displayed, so the owner, group,
421 # size, permission are irrelevant
422 tf_obj = Target_File.objects.create(target = target_obj,
423 path = '/',
424 size = 0,
425 owner = '',
426 group = '',
427 permission = '',
428 inodetype = Target_File.ITYPE_DIRECTORY)
429 tf_obj.save()
430
431 # insert directories, ordered by name depth
432 for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
433 (user, group, size) = d[1:4]
434 permission = d[0][1:]
435 path = d[4].lstrip(".")
436
437 # we already created the root directory, so ignore any
438 # entry for it
439 if len(path) == 0:
440 continue
441
442 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
443 if len(parent_path) == 0:
444 parent_path = "/"
445 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
446 tf_obj = Target_File.objects.create(
447 target = target_obj,
448 path = path,
449 size = size,
450 inodetype = Target_File.ITYPE_DIRECTORY,
451 permission = permission,
452 owner = user,
453 group = group,
454 directory = parent_obj)
455
456
457 # we insert files
458 for d in files:
459 (user, group, size) = d[1:4]
460 permission = d[0][1:]
461 path = d[4].lstrip(".")
462 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
463 inodetype = Target_File.ITYPE_REGULAR
464 if d[0].startswith('b'):
465 inodetype = Target_File.ITYPE_BLOCK
466 if d[0].startswith('c'):
467 inodetype = Target_File.ITYPE_CHARACTER
468 if d[0].startswith('p'):
469 inodetype = Target_File.ITYPE_FIFO
470
471 tf_obj = Target_File.objects.create(
472 target = target_obj,
473 path = path,
474 size = size,
475 inodetype = inodetype,
476 permission = permission,
477 owner = user,
478 group = group)
479 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
480 tf_obj.directory = parent_obj
481 tf_obj.save()
482
483 # we insert symlinks
484 for d in syms:
485 (user, group, size) = d[1:4]
486 permission = d[0][1:]
487 path = d[4].lstrip(".")
488 filetarget_path = d[6]
489
490 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
491 if not filetarget_path.startswith("/"):
492 # we have a relative path, get a normalized absolute one
493 filetarget_path = parent_path + "/" + filetarget_path
494 fcp = filetarget_path.split("/")
495 fcpl = []
496 for i in fcp:
497 if i == "..":
498 fcpl.pop()
499 else:
500 fcpl.append(i)
501 filetarget_path = "/".join(fcpl)
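# e.g. (hypothetical) a link in /usr/bin pointing at "../lib/foo" becomes
# "/usr/bin/../lib/foo" and is collapsed to "/usr/lib/foo" by the loop above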
502
503 try:
504 filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
505 except Target_File.DoesNotExist:
506 # we might have an invalid link; no way to detect this. just set it to None
507 filetarget_obj = None
508
509 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
510
511 tf_obj = Target_File.objects.create(
512 target = target_obj,
513 path = path,
514 size = size,
515 inodetype = Target_File.ITYPE_SYMLINK,
516 permission = permission,
517 owner = user,
518 group = group,
519 directory = parent_obj,
520 sym_target = filetarget_obj)
521
522
523 def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes):
524 assert isinstance(build_obj, Build)
525 assert isinstance(target_obj, Target)
526
527 errormsg = ""
528 for p in packagedict:
529 searchname = p
530 if p not in pkgpnmap:
531 logger.warning("Image packages list contains %s, but it is"
532 " missing from the all-packages list where the"
533 " metadata comes from. Skipping...", p)
534 continue
535
536 if 'OPKGN' in pkgpnmap[p].keys():
537 searchname = pkgpnmap[p]['OPKGN']
538
539 packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
540 if created or packagedict[p]['object'].size == -1: # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
541 # fill in everything we can from the runtime-reverse package data
542 try:
543 packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
544 packagedict[p]['object'].version = pkgpnmap[p]['PV']
545 packagedict[p]['object'].installed_name = p
546 packagedict[p]['object'].revision = pkgpnmap[p]['PR']
547 packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
548 packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
549 packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
550 packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
551 packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
552
553 # no files recorded for this package, so save files info
554 packagefile_objects = []
555 for targetpath in pkgpnmap[p]['FILES_INFO']:
556 targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
557 packagefile_objects.append(Package_File( package = packagedict[p]['object'],
558 path = targetpath,
559 size = targetfilesize))
560 if len(packagefile_objects):
561 Package_File.objects.bulk_create(packagefile_objects)
562 except KeyError as e:
563 errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
564
565 # save disk installed size
566 packagedict[p]['object'].installed_size = packagedict[p]['size']
567 packagedict[p]['object'].save()
568
569 Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
570
571 packagedeps_objs = []
572 for p in packagedict:
573 for (px,deptype) in packagedict[p]['depends']:
574 if deptype == 'depends':
575 tdeptype = Package_Dependency.TYPE_TRDEPENDS
576 elif deptype == 'recommends':
577 tdeptype = Package_Dependency.TYPE_TRECOMMENDS
578
579 try:
580 packagedeps_objs.append(Package_Dependency(
581 package = packagedict[p]['object'],
582 depends_on = packagedict[px]['object'],
583 dep_type = tdeptype,
584 target = target_obj))
585 except KeyError as e:
586 logger.warn("Could not add dependency to the package %s "
587 "because %s is an unknown package", p, px)
588
589 if len(packagedeps_objs) > 0:
590 Package_Dependency.objects.bulk_create(packagedeps_objs)
591 else:
592 logger.info("No package dependencies created")
593
594 if len(errormsg) > 0:
595 logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
596
597 def save_target_image_file_information(self, target_obj, file_name, file_size):
598 Target_Image_File.objects.create( target = target_obj,
599 file_name = file_name,
600 file_size = file_size)
601
602 def save_artifact_information(self, build_obj, file_name, file_size):
603 # we skip the image files from other builds
604 if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
605 return
606
607 # do not update artifacts found in other builds
608 if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
609 return
610
611 BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
612
613 def create_logmessage(self, log_information):
614 assert 'build' in log_information
615 assert 'level' in log_information
616 assert 'message' in log_information
617
618 log_object = LogMessage.objects.create(
619 build = log_information['build'],
620 level = log_information['level'],
621 message = log_information['message'])
622
623 for v in vars(log_object):
624 if v in log_information.keys():
625 vars(log_object)[v] = log_information[v]
626
627 return log_object.save()
628
629
630 def save_build_package_information(self, build_obj, package_info, recipes):
631 assert isinstance(build_obj, Build)
632
633 # create and save the object
634 pname = package_info['PKG']
635 if 'OPKGN' in package_info.keys():
636 pname = package_info['OPKGN']
637
638 bp_object, _ = Package.objects.get_or_create( build = build_obj,
639 name = pname )
640
641 bp_object.installed_name = package_info['PKG']
642 bp_object.recipe = recipes[package_info['PN']]
643 bp_object.version = package_info['PKGV']
644 bp_object.revision = package_info['PKGR']
645 bp_object.summary = package_info['SUMMARY']
646 bp_object.description = package_info['DESCRIPTION']
647 bp_object.size = int(package_info['PKGSIZE'])
648 bp_object.section = package_info['SECTION']
649 bp_object.license = package_info['LICENSE']
650 bp_object.save()
651
652 # save any attached file information
653 packagefile_objects = []
654 for path in package_info['FILES_INFO']:
655 packagefile_objects.append(Package_File( package = bp_object,
656 path = path,
657 size = package_info['FILES_INFO'][path] ))
658 if len(packagefile_objects):
659 Package_File.objects.bulk_create(packagefile_objects)
660
661 def _po_byname(p):
662 pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
663 if created:
664 pkg.size = -1
665 pkg.save()
666 return pkg
667
668 packagedeps_objs = []
669 # save soft dependency information
670 if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
671 for p in bb.utils.explode_deps(package_info['RDEPENDS']):
672 packagedeps_objs.append(Package_Dependency( package = bp_object,
673 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
674 if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
675 for p in bb.utils.explode_deps(package_info['RPROVIDES']):
676 packagedeps_objs.append(Package_Dependency( package = bp_object,
677 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
678 if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
679 for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
680 packagedeps_objs.append(Package_Dependency( package = bp_object,
681 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
682 if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
683 for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
684 packagedeps_objs.append(Package_Dependency( package = bp_object,
685 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
686 if 'RREPLACES' in package_info and package_info['RREPLACES']:
687 for p in bb.utils.explode_deps(package_info['RREPLACES']):
688 packagedeps_objs.append(Package_Dependency( package = bp_object,
689 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
690 if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
691 for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
692 packagedeps_objs.append(Package_Dependency( package = bp_object,
693 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
694
695 if len(packagedeps_objs) > 0:
696 Package_Dependency.objects.bulk_create(packagedeps_objs)
697
698 return bp_object
699
700 def save_build_variables(self, build_obj, vardump):
701 assert isinstance(build_obj, Build)
702
703 helptext_objects = []
704 for k in vardump:
705 desc = vardump[k]['doc']
706 if desc is None:
707 var_words = [word for word in k.split('_')]
708 root_var = "_".join([word for word in var_words if word.isupper()])
709 if root_var and root_var != k and root_var in vardump:
710 desc = vardump[root_var]['doc']
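# Illustrative example (hypothetical variable): for a key such as "IMAGE_INSTALL_append"
# the uppercase words give root_var "IMAGE_INSTALL", so the base variable's documentation
# is reused for the override-qualified name.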
711 if desc is None:
712 desc = ''
713 if len(desc):
714 helptext_objects.append(HelpText(build=build_obj,
715 area=HelpText.VARIABLE,
716 key=k,
717 text=desc))
718 if not bool(vardump[k]['func']):
719 value = vardump[k]['v']
720 if value is None:
721 value = ''
722 variable_obj = Variable.objects.create( build = build_obj,
723 variable_name = k,
724 variable_value = value,
725 description = desc)
726
727 varhist_objects = []
728 for vh in vardump[k]['history']:
729 if not 'documentation.conf' in vh['file']:
730 varhist_objects.append(VariableHistory( variable = variable_obj,
731 file_name = vh['file'],
732 line_number = vh['line'],
733 operation = vh['op']))
734 if len(varhist_objects):
735 VariableHistory.objects.bulk_create(varhist_objects)
736
737 HelpText.objects.bulk_create(helptext_objects)
738
739
740class MockEvent(object):
741 """ This object is used to create event, for which normal event-processing methods can
742 be used, out of data that is not coming via an actual event
743 """
744 def __init__(self):
745 self.msg = None
746 self.levelno = None
747 self.taskname = None
748 self.taskhash = None
749 self.pathname = None
750 self.lineno = None
751
752
753class BuildInfoHelper(object):
754 """ This class gathers the build information from the server and sends it
755 towards the ORM wrapper for storing in the database
756 It is instantiated once per build
757 Keeps in memory all data that needs matching before writing it to the database
758 """
759
760 # pylint: disable=protected-access
761 # the code will look into the protected variables of the event; no easy way around this
762 # pylint: disable=bad-continuation
763 # we do not follow the python conventions for continuation indentation due to long lines here
764
765 def __init__(self, server, has_build_history = False):
766 self.internal_state = {}
767 self.internal_state['taskdata'] = {}
768 self.internal_state['targets'] = []
769 self.task_order = 0
770 self.autocommit_step = 1
771 self.server = server
772 # we use manual transactions if the database doesn't autocommit on us
773 if not connection.features.autocommits_when_autocommit_is_off:
774 transaction.set_autocommit(False)
775 self.orm_wrapper = ORMWrapper()
776 self.has_build_history = has_build_history
777 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
778 self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
779 self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
780 logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
781
782
783 ###################
784 ## methods to convert event/external info into objects that the ORM layer uses
785
786
787 def _get_build_information(self, build_log_path):
788 build_info = {}
789 # Generate an identifier for each new build
790
791 build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
792 build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
793 build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
794 build_info['started_on'] = timezone.now()
795 build_info['completed_on'] = timezone.now()
796 build_info['cooker_log_path'] = build_log_path
797 build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
798 build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
799
800 return build_info
801
802 def _get_task_information(self, event, recipe):
803 assert 'taskname' in vars(event)
804
805 task_information = {}
806 task_information['build'] = self.internal_state['build']
807 task_information['outcome'] = Task.OUTCOME_NA
808 task_information['recipe'] = recipe
809 task_information['task_name'] = event.taskname
810 try:
811 # some tasks don't come with a hash. and that's ok
812 task_information['sstate_checksum'] = event.taskhash
813 except AttributeError:
814 pass
815 return task_information
816
817 def _get_layer_version_for_path(self, path):
818 assert path.startswith("/")
819 assert 'build' in self.internal_state
820
821 if self.brbe is None:
822 def _slkey_interactive(layer_version):
823 assert isinstance(layer_version, Layer_Version)
824 return len(layer_version.local_path)
825
826 # Heuristics: we always match recipe to the deepest layer path in the discovered layers
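# e.g. (hypothetical) with layers checked out at /srv/poky/meta and /srv/poky/meta/meta-extra,
# a recipe under meta-extra matches the deeper layer version first because of the
# reverse sort on local_path length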
827 for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
828 # we can match to the recipe file path
829 if path.startswith(lvo.local_path):
830 return lvo
831
832 else:
833 br_id, be_id = self.brbe.split(":")
834 from bldcontrol.bbcontroller import getBuildEnvironmentController
835 bc = getBuildEnvironmentController(pk = be_id)
836
837 def _slkey_managed(layer_version):
838 return len(bc.getGitCloneDirectory(layer_version.giturl, layer_version.commit) + layer_version.dirpath)
839
840 # Heuristics: we match the path to where the layers have been checked out
841 for brl in sorted(BuildRequest.objects.get(pk = br_id).brlayer_set.all(), reverse = True, key = _slkey_managed):
842 localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
843 # we get a relative path, unless running in HEAD mode where the path is absolute
844 if not localdirname.startswith("/"):
845 localdirname = os.path.join(bc.be.sourcedir, localdirname)
846 if path.startswith(localdirname):
847 # If the build request came from toaster this field
848 # should contain the information from the layer_version
849 # That created this build request.
850 if brl.layer_version:
851 return brl.layer_version
852
853 #logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
854 # we matched the BRLayer, but we need the layer_version that generated this br
855
856 for lvo in self.orm_wrapper.layer_version_objects:
857 if brl.name == lvo.layer.name:
858 return lvo
859
860 #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
861 logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
862
863 #mockup the new layer
864 unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
865 unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
866
867 # append it so we don't run into this error again and again
868 self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)
869
870 return unknown_layer_version_obj
871
872 def _get_recipe_information_from_taskfile(self, taskfile):
873 localfilepath = taskfile.split(":")[-1]
874 filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
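# Illustrative example (hypothetical path): a taskfile of
# "virtual:native:/srv/poky/meta/recipes-core/busybox/busybox_1.24.1.bb" yields
# localfilepath "/srv/poky/meta/recipes-core/busybox/busybox_1.24.1.bb" and
# pathflags "native:virtual" (flags sorted alphabetically).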
875 layer_version_obj = self._get_layer_version_for_path(localfilepath)
876
877
878
879 recipe_info = {}
880 recipe_info['layer_version'] = layer_version_obj
881 recipe_info['file_path'] = localfilepath
882 recipe_info['pathflags'] = filepath_flags
883
884 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
885 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
886 else:
887 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
888
889 return recipe_info
890
891 def _get_path_information(self, task_object):
892 assert isinstance(task_object, Task)
893 build_stats_format = "{tmpdir}/buildstats/{target}-{machine}/{buildname}/{package}/"
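# e.g. (hypothetical values) this expands to something like
# "/srv/build/tmp/buildstats/core-image-minimal-qemux86/20160330120000/busybox-1.24.1/"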
894 build_stats_path = []
895
896 for t in self.internal_state['targets']:
897 target = t.target
898 machine = self.internal_state['build'].machine
899 buildname = self.internal_state['build'].build_name
900 pe, pv = task_object.recipe.version.split(":",1)
901 if len(pe) > 0:
902 package = task_object.recipe.name + "-" + pe + "_" + pv
903 else:
904 package = task_object.recipe.name + "-" + pv
905
906 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, target=target,
907 machine=machine, buildname=buildname,
908 package=package))
909
910 return build_stats_path
911
912
913 ################################
914 ## external available methods to store information
915 @staticmethod
916 def _get_data_from_event(event):
917 evdata = None
918 if '_localdata' in vars(event):
919 evdata = event._localdata
920 elif 'data' in vars(event):
921 evdata = event.data
922 else:
923 raise Exception("Event with neither _localdata nor data properties")
924 return evdata
925
926 def store_layer_info(self, event):
927 layerinfos = BuildInfoHelper._get_data_from_event(event)
928 self.internal_state['lvs'] = {}
929 for layer in layerinfos:
930 try:
931 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
932 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
933 except NotExisting as nee:
934 logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee)
935
936
937 def store_started_build(self, event, build_log_path):
938 assert '_pkgs' in vars(event)
939 build_information = self._get_build_information(build_log_path)
940
941 build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)
942
943 self.internal_state['build'] = build_obj
944
945 # save layer version information for this build
946 if not 'lvs' in self.internal_state:
947 logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
948 else:
949 for layer_obj in self.internal_state['lvs']:
950 self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
951
952 del self.internal_state['lvs']
953
954 # create target information
955 target_information = {}
956 target_information['targets'] = event._pkgs
957 target_information['build'] = build_obj
958
959 self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
960
961 # Save build configuration
962 data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
963
964 # convert the paths from absolute to relative to either the build directory or layer checkouts
965 path_prefixes = []
966
967 if self.brbe is not None:
968 _, be_id = self.brbe.split(":")
969 be = BuildEnvironment.objects.get(pk = be_id)
970 path_prefixes.append(be.builddir)
971
972 for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
973 path_prefixes.append(layer.local_path)
974
975 # we strip the prefixes
976 for k in data:
977 if not bool(data[k]['func']):
978 for vh in data[k]['history']:
979 if not 'documentation.conf' in vh['file']:
980 abs_file_name = vh['file']
981 for pp in path_prefixes:
982 if abs_file_name.startswith(pp + "/"):
983 vh['file']=abs_file_name[len(pp + "/"):]
984 break
985
986 # save the variables
987 self.orm_wrapper.save_build_variables(build_obj, data)
988
989 return self.brbe
990
991
992 def update_target_image_file(self, event):
993 evdata = BuildInfoHelper._get_data_from_event(event)
994
995 for t in self.internal_state['targets']:
996 if t.is_image == True:
997 output_files = list(evdata.viewkeys())
998 for output in output_files:
999 if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
1000 self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
1001
1002 def update_artifact_image_file(self, event):
1003 evdata = BuildInfoHelper._get_data_from_event(event)
1004 for artifact_path in evdata.keys():
1005 self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
1006
1007 def update_build_information(self, event, errors, warnings, taskfailures):
1008 if 'build' in self.internal_state:
1009 self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
1010
1011
1012 def store_license_manifest_path(self, event):
1013 deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
1014 image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
1015 path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
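# e.g. (hypothetical) "/srv/build/tmp/deploy/licenses/core-image-minimal-qemux86-20160330120000/license.manifest"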
1016 for target in self.internal_state['targets']:
1017 if target.target in image_name:
1018 self.orm_wrapper.update_target_set_license_manifest(target, path)
1019
1020
1021 def store_started_task(self, event):
1022 assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
1023 assert 'taskfile' in vars(event)
1024 localfilepath = event.taskfile.split(":")[-1]
1025 assert localfilepath.startswith("/")
1026
1027 identifier = event.taskfile + ":" + event.taskname
1028
1029 recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
1030 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1031
1032 task_information = self._get_task_information(event, recipe)
1033 task_information['outcome'] = Task.OUTCOME_NA
1034
1035 if isinstance(event, bb.runqueue.runQueueTaskSkipped):
1036 assert 'reason' in vars(event)
1037 task_information['task_executed'] = False
1038 if event.reason == "covered":
1039 task_information['outcome'] = Task.OUTCOME_COVERED
1040 if event.reason == "existing":
1041 task_information['outcome'] = Task.OUTCOME_PREBUILT
1042 else:
1043 task_information['task_executed'] = True
1044 if 'noexec' in vars(event) and event.noexec == True:
1045 task_information['task_executed'] = False
1046 task_information['outcome'] = Task.OUTCOME_EMPTY
1047 task_information['script_type'] = Task.CODING_NA
1048
1049 # do not assign order numbers to scene tasks
1050 if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
1051 self.task_order += 1
1052 task_information['order'] = self.task_order
1053
1054 self.orm_wrapper.get_update_task_object(task_information)
1055
1056 self.internal_state['taskdata'][identifier] = {
1057 'outcome': task_information['outcome'],
1058 }
1059
1060
1061 def store_tasks_stats(self, event):
1062 for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
1063 localfilepath = taskfile.split(":")[-1]
1064 assert localfilepath.startswith("/")
1065
1066 recipe_information = self._get_recipe_information_from_taskfile(taskfile)
1067 try:
1068 if recipe_information['file_path'].startswith(recipe_information['layer_version'].local_path):
1069 recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].local_path):].lstrip("/")
1070
1071 recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
1072 file_path__endswith = recipe_information['file_path'],
1073 name = recipename)
1074 except Recipe.DoesNotExist:
1075 logger.error("Could not find recipe for recipe_information %s name %s" , pformat(recipe_information), recipename)
1076 raise
1077
1078 task_information = {}
1079 task_information['build'] = self.internal_state['build']
1080 task_information['recipe'] = recipe_object
1081 task_information['task_name'] = taskname
1082 task_information['cpu_usage'] = taskstats['cpu_usage']
1083 task_information['disk_io'] = taskstats['disk_io']
1084 if 'elapsed_time' in taskstats:
1085 task_information['elapsed_time'] = taskstats['elapsed_time']
1086 self.orm_wrapper.get_update_task_object(task_information)
1087
1088 def update_and_store_task(self, event):
1089 assert 'taskfile' in vars(event)
1090 localfilepath = event.taskfile.split(":")[-1]
1091 assert localfilepath.startswith("/")
1092
1093 identifier = event.taskfile + ":" + event.taskname
1094 if not identifier in self.internal_state['taskdata']:
1095 if isinstance(event, bb.build.TaskBase):
1096 # we do a bit of guessing
1097 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1098 if len(candidates) == 1:
1099 identifier = candidates[0]
1100
1101 assert identifier in self.internal_state['taskdata']
1102 identifierlist = identifier.split(":")
1103 realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
1104 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
1105 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1106 task_information = self._get_task_information(event,recipe)
1107
1108 if 'time' in vars(event):
1109 if not 'start_time' in self.internal_state['taskdata'][identifier]:
1110 self.internal_state['taskdata'][identifier]['start_time'] = event.time
1111 else:
1112 task_information['end_time'] = event.time
1113 task_information['start_time'] = self.internal_state['taskdata'][identifier]['start_time']
1114
1115 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
1116
1117 if 'logfile' in vars(event):
1118 task_information['logfile'] = event.logfile
1119
1120 if '_message' in vars(event):
1121 task_information['message'] = event._message
1122
1123 if 'taskflags' in vars(event):
1124 # with TaskStarted, we get even more information
1125 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
1126 task_information['script_type'] = Task.CODING_PYTHON
1127 else:
1128 task_information['script_type'] = Task.CODING_SHELL
1129
1130 if task_information['outcome'] == Task.OUTCOME_NA:
1131 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
1132 task_information['outcome'] = Task.OUTCOME_SUCCESS
1133 del self.internal_state['taskdata'][identifier]
1134
1135 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
1136 task_information['outcome'] = Task.OUTCOME_FAILED
1137 del self.internal_state['taskdata'][identifier]
1138
1139 if not connection.features.autocommits_when_autocommit_is_off:
1140 # we force a sync point here, to get the progress bar to show
1141 if self.autocommit_step % 3 == 0:
1142 transaction.set_autocommit(True)
1143 transaction.set_autocommit(False)
1144 self.autocommit_step += 1
1145
1146 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1147
1148
1149 def store_missed_state_tasks(self, event):
1150 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1151
1152 # identifier = fn + taskname + "_setscene"
1153 recipe_information = self._get_recipe_information_from_taskfile(fn)
1154 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1155 mevent = MockEvent()
1156 mevent.taskname = taskname
1157 mevent.taskhash = taskhash
1158 task_information = self._get_task_information(mevent,recipe)
1159
1160 task_information['start_time'] = timezone.now()
1161 task_information['outcome'] = Task.OUTCOME_NA
1162 task_information['sstate_checksum'] = taskhash
1163 task_information['sstate_result'] = Task.SSTATE_MISS
1164 task_information['path_to_sstate_obj'] = sstatefile
1165
1166 self.orm_wrapper.get_update_task_object(task_information)
1167
1168 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1169
1170 # identifier = fn + taskname + "_setscene"
1171 recipe_information = self._get_recipe_information_from_taskfile(fn)
1172 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1173 mevent = MockEvent()
1174 mevent.taskname = taskname
1175 mevent.taskhash = taskhash
1176 task_information = self._get_task_information(mevent,recipe)
1177
1178 task_information['path_to_sstate_obj'] = sstatefile
1179
1180 self.orm_wrapper.get_update_task_object(task_information)
1181
1182
1183 def store_target_package_data(self, event):
1184 # for all image targets
1185 for target in self.internal_state['targets']:
1186 if target.is_image:
1187 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
1188 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
1189 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
1190
1191 try:
1192 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
1193 except KeyError as e:
1194 logger.warn("KeyError in save_target_package_information"
1195 "%s ", e)
1196
1197 try:
1198 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1199 except KeyError as e:
1200 logger.warn("KeyError in save_target_file_information"
1201 "%s ", e)
1202
1203
1204
1205
1206 def store_dependency_information(self, event):
1207 assert '_depgraph' in vars(event)
1208 assert 'layer-priorities' in event._depgraph
1209 assert 'pn' in event._depgraph
1210 assert 'tdepends' in event._depgraph
1211
1212 errormsg = ""
1213
1214 # save layer version priorities
1215 if 'layer-priorities' in event._depgraph.keys():
1216 for lv in event._depgraph['layer-priorities']:
1217 (_, path, _, priority) = lv
1218 layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
1219 assert layer_version_obj is not None
1220 layer_version_obj.priority = priority
1221 layer_version_obj.save()
1222
1223 # save recipe information
1224 self.internal_state['recipes'] = {}
1225 for pn in event._depgraph['pn']:
1226
1227 file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
1228 pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
1229 layer_version_obj = self._get_layer_version_for_path(file_name)
1230
1231 assert layer_version_obj is not None
1232
1233 recipe_info = {}
1234 recipe_info['name'] = pn
1235 recipe_info['layer_version'] = layer_version_obj
1236
1237 if 'version' in event._depgraph['pn'][pn]:
1238 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
1239
1240 if 'summary' in event._depgraph['pn'][pn]:
1241 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
1242
1243 if 'license' in event._depgraph['pn'][pn]:
1244 recipe_info['license'] = event._depgraph['pn'][pn]['license']
1245
1246 if 'description' in event._depgraph['pn'][pn]:
1247 recipe_info['description'] = event._depgraph['pn'][pn]['description']
1248
1249 if 'section' in event._depgraph['pn'][pn]:
1250 recipe_info['section'] = event._depgraph['pn'][pn]['section']
1251
1252 if 'homepage' in event._depgraph['pn'][pn]:
1253 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
1254
1255 if 'bugtracker' in event._depgraph['pn'][pn]:
1256 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
1257
1258 recipe_info['file_path'] = file_name
1259 recipe_info['pathflags'] = pathflags
1260
1261 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1262 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1263 else:
1264 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1265
1266 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
1267 recipe.is_image = False
1268 if 'inherits' in event._depgraph['pn'][pn].keys():
1269 for cls in event._depgraph['pn'][pn]['inherits']:
1270 if cls.endswith('/image.bbclass'):
1271 recipe.is_image = True
1272 break
1273 if recipe.is_image:
1274 for t in self.internal_state['targets']:
1275 if pn == t.target:
1276 t.is_image = True
1277 t.save()
1278 self.internal_state['recipes'][pn] = recipe
1279
1280 # we'll not get recipes for keys with values listed in ASSUME_PROVIDED
1281
1282 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
1283
1284 # save recipe dependency
1285 # buildtime
1286 recipedeps_objects = []
1287 for recipe in event._depgraph['depends']:
1288 try:
1289 target = self.internal_state['recipes'][recipe]
1290 for dep in event._depgraph['depends'][recipe]:
1291 dependency = self.internal_state['recipes'][dep]
1292 recipedeps_objects.append(Recipe_Dependency( recipe = target,
1293 depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
1294 except KeyError as e:
1295 if e not in assume_provided and not str(e).startswith("virtual/"):
1296 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
1297 Recipe_Dependency.objects.bulk_create(recipedeps_objects)
1298
1299 # save all task information
1300 def _save_a_task(taskdesc):
1301 spec = re.split(r'\.', taskdesc)
1302 pn = ".".join(spec[0:-1])
1303 taskname = spec[-1]
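# Illustrative example (hypothetical task): a taskdesc like "busybox.do_compile" splits into
# pn "busybox" and taskname "do_compile"; pn may itself contain dots, hence the re-join above.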
1304 e = event
1305 e.taskname = pn
1306 recipe = self.internal_state['recipes'][pn]
1307 task_info = self._get_task_information(e, recipe)
1308 task_info['task_name'] = taskname
1309 task_obj = self.orm_wrapper.get_update_task_object(task_info)
1310 return task_obj
1311
1312 # create tasks
1313 tasks = {}
1314 for taskdesc in event._depgraph['tdepends']:
1315 tasks[taskdesc] = _save_a_task(taskdesc)
1316
1317 # create dependencies between tasks
1318 taskdeps_objects = []
1319 for taskdesc in event._depgraph['tdepends']:
1320 target = tasks[taskdesc]
1321 for taskdep in event._depgraph['tdepends'][taskdesc]:
1322 if taskdep not in tasks:
1323 # Fetch task info that was not collected previously
1324 dep = _save_a_task(taskdep)
1325 else:
1326 dep = tasks[taskdep]
1327 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
1328 Task_Dependency.objects.bulk_create(taskdeps_objects)
1329
1330 if len(errormsg) > 0:
1331 logger.warn("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
1332
1333
1334 def store_build_package_information(self, event):
1335 package_info = BuildInfoHelper._get_data_from_event(event)
1336 self.orm_wrapper.save_build_package_information(self.internal_state['build'],
1337 package_info,
1338 self.internal_state['recipes'],
1339 )
1340
1341 def _store_build_done(self, errorcode):
1342 logger.info("Build exited with errorcode %d", errorcode)
1343 br_id, be_id = self.brbe.split(":")
1344 be = BuildEnvironment.objects.get(pk = be_id)
1345 be.lock = BuildEnvironment.LOCK_LOCK
1346 be.save()
1347 br = BuildRequest.objects.get(pk = br_id)
1348 if errorcode == 0:
1349 # request archival of the project artifacts
1350 br.state = BuildRequest.REQ_ARCHIVE
1351 else:
1352 br.state = BuildRequest.REQ_FAILED
1353 br.save()
1354
1355
1356 def store_log_error(self, text):
1357 mockevent = MockEvent()
1358 mockevent.levelno = formatter.ERROR
1359 mockevent.msg = text
1360 mockevent.pathname = '-- None'
1361 mockevent.lineno = LogMessage.ERROR
1362 self.store_log_event(mockevent)
1363
1364 def store_log_exception(self, text, backtrace = ""):
1365 mockevent = MockEvent()
1366 mockevent.levelno = -1
1367 mockevent.msg = text
1368 mockevent.pathname = backtrace
1369 mockevent.lineno = -1
1370 self.store_log_event(mockevent)
1371
1372
1373 def store_log_event(self, event):
1374 if event.levelno < formatter.WARNING:
1375 return
1376
1377 if 'args' in vars(event):
1378 event.msg = event.msg % event.args
1379
1380 if not 'build' in self.internal_state:
1381 if self.brbe is None:
1382 if not 'backlog' in self.internal_state:
1383 self.internal_state['backlog'] = []
1384 self.internal_state['backlog'].append(event)
1385 return
1386 else: # we're under Toaster control, the build is already created
1387 br, _ = self.brbe.split(":")
1388 buildrequest = BuildRequest.objects.get(pk = br)
1389 self.internal_state['build'] = buildrequest.build
1390
1391 if 'build' in self.internal_state and 'backlog' in self.internal_state:
1392 # if we have a backlog of events, do our best to save them here
1393 if len(self.internal_state['backlog']):
1394 tempevent = self.internal_state['backlog'].pop()
1395 logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
1396 self.store_log_event(tempevent)
1397 else:
1398 logger.info("buildinfohelper: All events saved")
1399 del self.internal_state['backlog']
1400
1401 log_information = {}
1402 log_information['build'] = self.internal_state['build']
1403 if event.levelno == formatter.CRITICAL:
1404 log_information['level'] = LogMessage.CRITICAL
1405 elif event.levelno == formatter.ERROR:
1406 log_information['level'] = LogMessage.ERROR
1407 elif event.levelno == formatter.WARNING:
1408 log_information['level'] = LogMessage.WARNING
1409 elif event.levelno == -2: # toaster self-logging
1410 log_information['level'] = -2
1411 else:
1412 log_information['level'] = LogMessage.INFO
1413
1414 log_information['message'] = event.msg
1415 log_information['pathname'] = event.pathname
1416 log_information['lineno'] = event.lineno
1417 logger.info("Logging error 2: %s", log_information)
1418
1419 self.orm_wrapper.create_logmessage(log_information)
1420
1421 def close(self, errorcode):
1422 if self.brbe is not None:
1423 self._store_build_done(errorcode)
1424
1425 if 'backlog' in self.internal_state:
1426 if 'build' in self.internal_state:
1427 # we save missed events in the database for the current build
1428 tempevent = self.internal_state['backlog'].pop()
1429 self.store_log_event(tempevent)
1430 else:
1431 # we have no build, and we still have events; something amazingly wrong happened
1432 for event in self.internal_state['backlog']:
1433 logger.error("UNSAVED log: %s", event.msg)
1434
1435 if not connection.features.autocommits_when_autocommit_is_off:
1436 transaction.set_autocommit(True)